[Binary archive contents omitted — not renderable as text. Recoverable tar listing:]
var/home/core/zuul-output/                      (directory, mode 0755, owner core:core)
var/home/core/zuul-output/logs/                 (directory, mode 0755, owner core:core)
var/home/core/zuul-output/logs/kubelet.log.gz   (gzip-compressed kubelet log, mode 0644, owner core:core)
]N3;n?^p@cK\n鼩 1,3 bap(|8 xf7Y׹:V)ouɦV)yu%ϯQr+bXqcJyD~YoP.| oON_z:7``k؆l n47]hilo4z+,Se|Ż˯/Uo\&*Wx $A(#H9<q@ts1e;L6mNDC^oW%=CB4b̀@ӾpeAm66d G-érRA scҬ g30b}c~t0S"~VD9 <@k&ҟ7Gv|;mi C ёd4Z`1 K20s>Z)*0u;2ӄe%~#u99Z hLNQc$ NCк"@M( ';jj'[ouߍxy[ddr }ĦYP9>­GY_ŀ{Ӹjni teRo#bEJ RAeV* dmFb4Uŭ{Yx̺%R Z0"R!PAQ9 / &ML&v3q  bW`T KmG#ǀksXO[Is;LB84j$729JyyPs\~͇JПmEe4+"W Ϥpu^595 o;;Eo߃N7j_~N.wk{% |7Oc(`к}snSc^l<ʏ:6,l^kTIT%OJ+*AE p&FݎtʖZʖ#^ZGA{g4PE&Wpܲ%<8-}rPgdl9EtxLhM,(8P#4Z"m™ckg/mpO7‘r^zDvsJ{99YB YNPPRRNsa؀Faa)S]  !Kldhښ"qI{SD)J>e*ymh%x%x4NR׳NK,s,eD%oRe8vX͍g#6LHiM )ΊPLQaXgB> )w܄;gB2!N) XB5'Q'i4)*p!" P$c QTC,Ft<( Dx B>4e"XPpjҢ1c, @[#v:˻9fsR-^o8{ヰu=6Iƶcz{" ëOW#(!K~h+DpĖ)JN lEkM.7!]ՈRP[Ў;낲>tt(]`&;x=G3 T*4ʖGJ+9>H+$'ud,3RE#7('Cle֡vB[ÍK/M#W!E[{窥'/Kh0h( lB2jt޵6["ijrHh lv0Hf^6N%$ t,Y] ]EQX<CKh1YA(.56j&I4X۬o 3G0&3OfAVF2@I欬gQϺ)D۝h2Pj FBqIidŐK%Q nd5UPӯB҂-/.ܒ8G"To$-f ΀wY\wYbY)4-ǎau䔠< IlU:lZG'xw#B:%^+Wm|H5oAydKe*+BΙmZ&< P;EcQEX&/fȾ+kӲ7w'H>x3yŹw{cp3>{:'ٿ\[a/=y-cr:$0` nq8EZ]/]\+b~? LuK-ղ2,I~OFA%meCd recPVx-> RI|V,!y.-DJSJIt 3d%HU;*F+!ts d>)KKpuOӇ@71GoE b~?s qfN?&\FellfxX$#h?x. ]f4yXga{Rurc29Ca # D#FJEudIBQI CEUiRr)iU @Gn/yhM.YDTf1惐R+g] <|R9,3c2))pL#00#G—:*D4,G %D& 1D5i@!~WSxR#$HW aGtҞk-%Gnr)OjdOOuVRSmԿ-c{l;F7NcTt97VE޻!dkLD 4RJVsD`, 5U TI4dÐ5c" Hϲ䂠'~l.EIQs-礤@62Vg;2*հ Ue,T=>+$*6,xCfxxzy Oˆ,@$&A29V¼Ƥ(O=2u @P= ) lJm!ad1Y`u)`QG-q#⚉Ԯ6;Em]u=}h+,*+fEJ d"@tR~>蠜m$4ne r +:dkbbA$0h"QtUd2W;85 0v͏}TFD#bU VJeBBU[" ٱyVgt99I$xR1ĺܑ3 mHn6\F"l"Gjs%ejO8mեjd_\0Ba=.>8w7*'[9 pL: S%|_<6;CsªXh<|[lHDv=/S*wQn)׺Qmw_іxq5R<-`λR ƐN'ws|on{i1ո].cN'Yj@*x&.\oSKc-Fb|v6BN[Oi8iͻ˾y6 Dot~ LČ9}ƐȀ1j dc[i )OMT.V( eeg602,9\X BRG$2qLef+LTgǝwn.mQ26&,3T.=/>9^vlMs㙢l4& νE!!蜃!jkC$:sMBςe.wi 8fadĘbJQP9d4#O,:2f]`!F0jBayA.))Z9Az\AA쀻P;CX5qG#@9޼W^Ք|8a'x|Z;

Y/Foq- N2/DL6KT\MIojn8HE2Гj0~(A{dKn'û-n>l^Fb\w$xueg~_-T~Vh"-|h'OQ>[[F0 d*v1 o$&uqiD0/4Xv9-4_h0veJ+BL-]84 rٻ W?7>_GW>8w $wg_Ӻپ˖(,o8T&w+_kdm6{y1ߍmϴ^iI=Ӣ+7O^hz$zh:l8$0Ր"ad$=z ZKWL>_pO~ӌI=&JaR&ېcmKPԞ/pbkJ QEį_r5:-ڸ8ؑ"0+v ouP$#(_RObyYP>pցT|:Mzi(/iLϥ$=M\/8?Ӥy]n]0ud?wl4h,>nN|4,Jͧ~n)͔etL(﫳F+/G#M9K;G/`2ǐU 4w3GHXɇǣDѰqmx4BԮlg$ $}* ˆ+഑'qmV0Cl 4yb,1/$V= )L޵q#28OAb8Iuc9kFΑr?1[oaZd(PP>KU=gd2F1ֺʐuAPBV#b/&Ѓן zk EH~J&8m]YDBVwDet >4@fP N줍;6 D5IBg](dhE xΣӌ/= 8(&WY=n6붊#ci%r҆Tlkp'ɀ5G(YŋyQѪN*EaR2A0mW2g}f,rO>Fj!|z]ȅ4P%RQA)E dXqh 4,F c:^MAVr_鐍C~WBt&zAmq$} Q{̿&6FNb^7κ4yΌ'E76S;DrAbGB|"8@G>.8߾NfgR 1OR7/Zx& \+ID]leOs}))=)֍|? b@0.*B DQZCgCH:Z-P`{GC ~)uEe{R̺x˼<\oUSc^ֺ >HG%G_^ 6ᣉ/ZėWjR9_<˃'8 xu5gKH-gֽ.Z(O FĒ (HR[@lxZ] H9%V9JjJy%g.U}aË2hl`xP4sJ6Lv v"Ģ\]*|h}2ۊ;O'ebEk7jmXkAkv[+bQܞh)z0ߚ l7(M@MbO-dFҰ <$N 8 )c(|j֌paO_9RU}ш7ֈ~ЈFܟW [Ʋf۱^*Y%BI3H`'5T# pްmLhR7T=1C0I_؀-ieN^PLc، OY/κpiW/6Naà7)1UdSdB`:!GV h1I褳NT ߷^<^>l҇hc{W؎DdogszX#Jc>{fK7sNp#9kG+:Y*%fӼ pnNk;b)c*{jfw-[W ¦jXWtӝykߺߵr ۳ԃqĉI'H d Q dљ4OcDH 0,=J9uA[Rmb0ƅ4yE#FќՕ H (d4zigo>8y9v?@P} oj}<~X]w|cݲΎecqBmv B۹Hq.}S7/t6d5:kK %c c:c21&XZm}ڊjEjUU+BPj:c5}&_QJN3ǒ :86+`#sxVV$0E=颸o/pMnV?܃&7#%:8БbeO{z7?YF't!ԙdmL ,73isR2-DPs/0.uq|^ՓAH/Izxn:ܶO٨zN{賂2{*||ΎKl /iH2̞0Jxmnqknz<فN7m׭;e-~fwozo|p竔AGr??\N{[_zQJcWI̩sb+ƨԁذz֛۵5}!֗oΐ!]G`?m#dKׅ-хA5b%-3ڪ..H%ERL-u=r:cV~L` 2\ OhTmq#DѠK+l&:#5ݙPB(ol1T )>j\IDHxg9GL\EO[@6v&,+G/񜭏v:c5y f>X@kD9kd 2J]j!%@=܅J $0(GP 2Sh HQާ`Յ% LRKV1Hovh<~V{[,Z9]>KkALкF3rpoɘ pgvoOzH^<}^wST+W/3㙼9+fAeR(IkƜ&) KB*F6S\(@jFL//pP=;UϠy>5YǛCB hβZ!+1x`c@$4OSzq7tKq=>(‚)I[Eb-}$9, 5,`M^z7H}3X~ -vp`QZ[v0klZ(*F+O\ EVJ岎.M`M }MBt[I"+fo3 >afn(=5zu1~<_JfO׾br>OsnVM_-M/_ō~ǿy RIW*凿~͸jE_~tr}| wr}ߖ[tӒƛcn̙O^Op}&2cbu+|G߼ojيG)o3ÿ,g///?ldb-Z|42)_\pMMߟM.6]JDI<[ߨ2ﱼˍ !f '+7hg}hrM全oN_gc|btF0F~itKUӧ;I{ nRxQ`|5E0>j$Gsf kv)F_rF___ӄiKL~{lZ?oW2*Z}:b me jRa+&"]ETԇHwo82 W UJ왘'&ae nu`t篷jxbV0tTAXθ R_%B;No Yw޼vcDjR :i8ڟ9JR4xk*qi]qDι&7E%BA ["MDf=s Osgw ed9j"dL(B`ge J8(&Jr ݌W}Iߌ7[JDlr[\{]+0jP!tB}Zhݪó;&A039KBEMgt]b32kȆ`SjP_z+{K6JlwS1 ( &J`rL%TVb,)g.`ִQ"2 *; ]QMR$ AX b)+bQzMJI,( e#w3u m%/Π;܁BjIe%a({ZX7azTÀE0k+LBԈHBXgrXk,! i'y޷"2b!>!Wr`2,?\Q]EL]NDK]ntP! 0v@`cqK1agy%8b0JX4)zϮ@ vy?m%AF=iR%OZ3h4 ({ۨSxa-F(CA4$(<Ş[1t#JFm{GjM>XAA"K|~<9bԧjbCDƧaqk`QD l4} TSk zK9@?\AA!+Eg0IV"q~1<rVyOG7oI(& OKkQJ-H{dǟdאW VޤPH&ecR1G.ǽ4xEG%sT-(IǤ8u%I9hը`Fo/gljsXpL`,[*ʰ>eJ@1Gm7%IXv#vd'%&z>k{PgFd~~RdOuzq5y 6mN//_:Aװ{,ȽitKjX TX؃blég=xVb%Od_Z(Rb5TQFEdlDD!\)"7$"E7t\bF\\/kĝ\`88zr|ףV r55!8`۩JKqoU"suF=UWFA]=uE4#uERDu|UzPWI]iq԰jrq)Vb*thR,N(ΫXFX )*xxMFQ9ɩsLr2O0=aL[RBMc }{=`,@1U0Yd&t7lgn3r*uL6UFSjRN1aHgfӯ>U MjNVb@uLg9vYaf-$r>mn4W  ={pV}É~`Q}I+yoU"Wڪ>DecZЌ]- lK3U^ǧ&8(dIKW+MӚfu,g3șYJը29>[d Em l4i&a?}Ztm% 1ΌK5=VN6hYQR|ꆕtD-5W!Rm+QW\[$wV&3߀ZUfR+Vնd*[M-h@yQ9ꁹzo1,-r a;LyͱYxhJf2kBH1ԑ)KyVCJ W1`07LPSQ(Æ20?\eQ~:U$(o:UboUЇ5 vi,h%$pJkYa96S&aϵZ[XcV"08J D8Xbc%¸m?êm1':1Z=wWn%L+bKje&P <dhXᵌ26MVHKD+0jMu9?bh-#[Ba$$$cN- )$<`fBQkI`- n|؀֑ai 3r'4 - :c !$h 4#r/.c/)YB %)QF(p0UaH&]^CJY`A85>F%Q&Yp` R!&P a-.E,w aͨ#P#\#ͶTڧf5.#%q8N ,AYk \nAKjkP2D[*D?{WǑd O[ ϮF!&%kllQ%†UegEeFFSs.7GM4nֽqN4dEoc9eNJku}d S)ΥjivJcQM|Q\*%6Cb0ms)T.WPfX'餞4/] l[} ]& 3k_c#7%}^$\ @4-+ )D!; x{jڐ]jy;"4#H2./Vc126XAX3rV{U@QlxZCK68U& 'u$jɭZxmBV58 e8ni4 O͗U#^e:B [q;2p͸X:?)j@J4TU;+Qqn&k)' O7VE`Í-NNf[. 
O&!z*4}4 \{#BK|t)}юbRCJD%| \::$ճR"T2=f nK)Kc[  %@W@PlN3h NmB[viXp\ @(Ф pd..h]qcm\g 0S(ͮJPƁgUkUƂy0) !.}p$,R@'J s?mfH'mY4g#Lk4ZI0xnAmJMKKKoU4Ľ";oR mi/^4,a* 0dkRqc=A;px~[}1dI@P5"hfU:9hѓХE0I0l("ֶ͢YSQ֚Br$RVO ]rta#·fLqP刱 ^O:(Jj]d*De@e!5@JAO!Ƞ rwboڬGdR4v"*TOmW"($'Mm2X4M57J'RxyE ap`R p#X("#z!Ni,WQYjЏQ380)`c#7 3h҂cM b $_TL(X6UK.duN^ljB]hD oZajlJ؝5ZL!-?X/j>bwAeȰV*xQ vY jM'\h M6ge@C")4!2̜ VpP8??{8Ae~\W~O57lY捷&bqkn^StwQ *G/cNS;g;G\\*\GJ][w;͢«uNX IWK;s F@}17L\|݊L e1uGnoO;F> Sjg&jїz(=+da WS5*.n2:| QZ|lLLapfb2s8˩\CRzLSaS42.?oFx7KJN~g?filPT<{#BqPJʜ5}7#PZ0T.RJ Z/[*EDs` ]co!VBh-y)X:UW*IjI{40LqLńa2sޒkpwoǧK.˫=f/kbJ?>nMWoGgngg3L*ӥHȥ'-},$'pjOU) E:}+ Ҋs&,_U=#ϟ-x` o݇Tk˥ZΒ@טzɢx‘gCp=7In91,1sKt\;Y5}DP?eً59{F:zZ}}F}( lȉA*pc?+A?˿|s̏O'*̵tv޾*+lnoNNgo_)Ge.5(!?g }ߖd p2]\Fʯ磹\|&g/;]׏ف@wTw_٥+[ins3(V$]UM%Z|}_J<NXV~J>\V:f;ץn0[ c՜[kWcC]sW4_gB) ]s#}=>;% Z||E߭8/~=M `1V>PV]@28.(G&˫/+Y߳mtc7%,ej֝ӳ ߣ׳Z&;ձW>bIweEswL̔`=waJsUyK8=~Z7ōܥE62aʹ#k0'ӹ,2<=z۲ɚ^:NEs۝-RB*d8l^Bu4Uŵf" (eʄh:cBڪ,#V6Ƨ_^PK`-?se-3$Ȓ12z<av>GQR9ƌd tAKq7z"1US5rݭd5sjҵ|Ӛu͉;PQ ;opTrgϏ,g>7T(?l\>p\h4֫JICP)ւ9>3%vp-qz=;kΒXD4GEs##a%s`1) .QVjfEsQ\t:e4dl,E<2(>gQEqvy`S^˕'9-+V 2(adhy"7$LhHI.M&YS.@yoviP7|&Ea k$TKʪhu\+--1aHH]4*#ZQ{]}}q~V4ʺ[&d&(ƒ2:&`%4i`Lд% ?1]yΤ(`3wނ8WyV8)I`:c!*H)^U[q @P?`VV;.{K:p[-ŠkG8$3xdI_XTaz ~Q`T׶\W꽧L6(?Snd`Ù,_#glN1L#kES< 錌|HrCd  Bm2֬.hBC}p>>Jq<vמ/6ijItm)Z_ eU7<4T|S?`f1~% ې HTЁkZ%ZmH;ŠWvw޾ݝh [PƳ6oL,b$o8cJOaZoN Ј> d2\vmx8Lf)au׳%b9YOQ{;޳|$ϒP{)gP}mÅ90S'B\ЩJIiw!Ʌ4w!u\H!t…̉).FhYk2i_RATAY&ehgָ+8u#g-@EU3@vddn֝.w rAUVPĵlĔ]8IsBAlicۀz7 `" ǫW>2?2?2[Uh^ ՗xe4\OHIeL-?2+8Tvl`ts' =ߎ.yC&{owmF`.!Y18 $}NjzAOy*5-BUcLJŀC)t3慰y^>i;7;ņv)KN#D0/ 9—w4}2yP/ AeQ{aId&PX^J–VdHP1sse`QhLcinE>gKRٲ̝z"J1FgLHZ=JeΓ3u.J>Z3`+Ia<KL:Ck=6z:cJ^gugO=^:[ A˝!`2(l1rPq%Z6H1j [ŢIW6Uӗ" C~dO@tȾGJ!  ,CDƋP2; zԊQ'~CWY\;Z ʮ 0dK_b1LyT٦P2N$K: G+S,. >˴`9]ybYk - 2A Z̳V%I x|GVJ,iW5#|+}-2ʨ)lJ 9gQHH'P:Ѹ1ANIet/F- UqrzhMBneq|ZðVʃ薪Z%0@-ظG3@콱G:`Dgq a(jPћJP4y~YPO}Ѓp5܅ȼCAVAG4y|0AfT<x|6+/ H )5D#`lgMVԠQιEk,!Zxn u)qH;^MI2OoEXf+lc4\tgGt%u"CG>s>ARO繫ߣ|>n#xhHJ)*ǵק1fH, zR(tR49)*c4H5s?26UfƶX(Bbb\ҷeakNL.։]4P^o}4ό,1֟#L$܄S 5ᕀ3>5aHbAx/) ¨:mfNU9 A:~~\21wۢjU-kK H˳hxRq"X0:")DT7A, BN,vɐ,db Ɋ"iš ( #a}|2B L5}1qڨ,+uÈ[DlD% !uHFV(bCNvl),Kٸ @<)Zفrc P6NYi&F bYK#MsoJ *ʞ%z՞pqTMk|͒mqҴY>S ws0; l<0}сp38yXSacc+<Oak2\^#7ʍ()WQLF?LE%ܣ%ZQ]:u7PXu7u72u7䆯^Zn؋ՃK_&/ WV\=LJ+w JpgR=+17pUE**u*RZ7W\*.pl \ׂUA mcsw:?=Y~I/c :Uvt~`<5hLGVU4-9v͡QJf}%rzwF)\~C e=G/KgOkƮ;Fn; g)|9eI튈nKBITF|WGWPІ3ԽgSme*_T{{mIՁZ}y ` w2YNnBZ#q Qev_8F֨]Bdc1AQ>D$|oઈƃS5;p&) &ѷWRIm`6rUW}"S>7 WeZ"+iyη6 t\LKX |P򣝽Z"u:ܯ}98:=)W/9hVRgk7k·:>!3sݠԖʽvCtkY!phA$l"ˬTDD8Q^mP>iԚj8؀BT Ii}Q[#tpy>wt ~{yiU$醊ֺ^w9;njzԣ뛨=;TL) :ۋW #YsŧX<"i|VSNDH.O}`tB(…q:&n\fWjS[$IM cFe!c83`j%-Rn4Jwu8f!ͬg.$RM [-.*+n8W:)(ǓS0|t~DC>΅d:J4f?MW#;>q$VCFwQO]\ZqVJ OTjrK\fkwݿ&W~/MxLVa}s{z6[ac(y͙`MKZ+[Bu01(2e%{W-wwuV9ꬓZ]WVGeG#t4,ic0W]rxď! &[pyX? ݋3'޼]O'߽?!9y7'zMh)pN _xͯ7=)FDެiaf+]|';^~}ݏoPJ*^ Ի!efLRq؉_!iڟS#i]L/&_Ěvq]K8c !8vD`_>{"f/wH ;iQ閡6/u,mÞww='6ƾ9gd2 EIYmk_4eɁHkt>(Xއ.C xk=wdU?:\l>pʹ@Dɝ*428OvȺbm6 tS`݄jdjƗR9e9Tp9=f+jC;FK [HLV+_o?D%|e@ T^pWP)m 04tPmzT@{|d<u`3ʥR9$,pV8 oAIrRetd j7<@>4EvEo|#O|i hkx_:PڗL@fM 9KKfKv*o3;eoioji|= :=֞Yb''H־y!ظ}ɡy/gM"q,Ȥq @ee1H˽NѷVLM|I!]'2#y@ :褔8iYaA$aQHɸa!J묭TFjeg2^_戍s?G$!.۳Ų3ZbyT#ͼş= hZ/+_'-|!)_X]>W8a=W~ UX`"JDe+/`JbZf!l:u.NlX'Iwun)KYg`2` :4GȲJ,I1J!ЖFYNɥd00hE&|[չe.{g+ju6&ΆQEwBйbopٯjGIABm)It L΢AoL޵m,ٿBSra~07׈G0)j4Y[EKRK Y"jfu0L$GѢpK U`ن$ o1᧌#ьjSƠ 87j:Lh)g? 
oLT)=e:+):u伉fh=_4wYr~O];/o$[HƓ\sεJtLJH M%0?'o26(c epzR4+:fR49 STTqG)i*ĹqbXXL3n0  fȌ;'/7oOümPp|6<|Fl e< )6yrD)AWְ!Doe?u:{.2i8CJ% 163WXC$hSSG)qnFl?5sWPvUaV=j vKIe"Ѱb`YYFBE,17 R^tIDb8&̽@YF.vDf~+" i|wfR1qR90),8Ր,)x10!#;Ec9OFcIvj7s1 uAE)N qI419OEC$0eD}4%kZ|?4~Oa<v^]yxgmh-6y9(pt狪pnɦ*h~=\YZg^77X٪N'i pWTmmI[Wjrˋ :NbE,I5'^m9{`F鲪`[D?LhY}+m&5܃u1X0jݥ`wzcط.T_:W._VWt/ .]X=(\{J;0_"MwC)>K,߂6*b"AlTSsN˃ZdMnz}V xL)>U 7ާQ5vcjֳk//v9:F (kjw3pś8s`Olؾ;9p8)sԯ%MJ]l{C$&~@-B-,`[$DeL-O@¤02am%3z"f0ې1*XG 9AcW( gydqn^h4ĆlhXc^nx ˝ס*|Z=Ok8?Zcώ"-\X?ZyW 'embH55I8Cmn|mUnY~pJ!v WN)`$`4\yqH6w5nƏq7tls󶺍OZr~vn7=+==ܬxCcWcF緪[rk\0מNfkoIabQ~ļGyVjv!9Ϩ N޼&K%Y\#W#Ղ4x4y~WBcy쒒=Fأkw[݅FͶ 4w堼ht4.k-^4a ASDM[QC<; gUfaT~j_ΛWՅSfA[kn* O XV';"<9Ydya#_mŪo&W׷E9`<%]>7*r]`eT_MY܌wV.[8wXk5gv |u}/ ..GP: L[5o5;r뢋֝VQK'~? .jo'\9xi2(gwJdYOH,P*KJ)"38͂^|1Xh~zC ĸ}ۥL栢$9Ƞ=9i(s"aS%-z-Q9Ǫ/8 %G \Mѧy BSr>ʦaZ8M*PשXB0BdN։\@luf BT W)3/p BKkR2D'RȣĽ"&霁e*1)S8=1{ջشu+b]U]ko+}tG,07 t,Hi+kɎŞihQRKn:(g(bw%WrO|1/ÌӢE5*YE勵b}wŎ,uex.dsxPs! :1d4iͶ}5mb a#Vs2,['F"[x'yu8+(kWڳ>Dk6?'J+eQRJfEQ6@^ #+{4& B1Ե;zƢ6eØ$t9',|ILBuPXf Kʣ]W!阁 X .TL6Z!U 8Z'b~"jE.JècұyNcd~ϡktuWp۪;6gt wQv šZtij֞1vEeՁygK*+9]Υ|N}=m0pheCyghBX7[uw.#z.J0הj JԫnUNb;n/CWHU=Y&oO~+_ ?3la/Ug~W0q=$Үä ȗ5O ͒o%*1tz/Ĺfo''od1 =% u{U$2WRUl{}wݬpذOs=7O׸ɲ7;N-/X?sy5?_, 䇟ZL&aomޙ)g 'd6Ҿ y1pUwVH?tRj=ի+{૷ g{ l}fF),aܾx44A1h8ǰY@+zA9 {1yWWi͏*z Ze\s묾J_DD:6w؅Lx8>18>''8-7 )C$R.9bHG*XDͦ([͏_ȉI.d9P=J>Jxt@`RhJݫqVMy}9+,w k%|o~(+#*BeZ3tJ![cI5ע.Qu@yfWzzE"8DWDUOR$j+HI:dIm#݃(u]+[bUP{9/MI `Yuc=k&΁z՟@ywLZ@t !t)ƨ+$ X1P*hWU?sң TOe~O5A匹\K>  66,yB֩ox :TJN'}Hvx.AȚD*1yy^t#_:я8ٜ82g*L&٤!˴QA`K#8H&lQꠂZ J7j[jZ$_b(l|H-:K %IxJ)RI[Wte]ӕAŨz\.. Hrv!MЫE]XSƳ-I7.8vHw>{>ĸ_ 7=NMn>`ݛǺtaQ0oKݜn2- O_p줛HŶ{ɶ67>~ܒ:e..kvE|y jBA.ȓ&s1g3QBOD#;..ǬpV?scIhu>ڨV@&/Dڄ}$ؠ8 H[LR"(="btG91m&~5 "葼ov|Ut'ə?Kl$goؓ=̣ڧiBa5ԽA&u *y뼤Vo-WG8ԛUCKmKKR?bDF̉dPHQEEc`0'CQh`dMX #tw Dp:_Yb;a[t5|ӽCRv%^&X+)sQLte`nj%LE[?j5:wŇ mņaW r|-+LsWǷ~^=ݞn;x/?ȫ)l){໺:ZpNk;v>P>J =`& WS} -ôㇸEyh`kɸ;;%vQ %T9Y&<}Gl{mS\˶gPl#eioKTRRTY&A!T:#aIHWQdR2@DFk9lMQ(0G:VY"+Ru&ضo&΁EvB`k@Xo( \Y:cJBD$%c FlX fA6JP|ʦ柍?Q*ɔ!8LƊL\ 86t"p ͓# 7YI BsltK_qeCWo27>~{lwwvrV;[Ts)֢JgHڱͼh3L`< 75L׀6-BTbuò-CON8*>* .V)YI Tkdl&ћqfXlf1  %cxKlf,_r 0>Oef2 O 0I9AԨ`Am=! 
,2c^ %tJ ںd!Z <¶@_ ޖH[(g?b9mCAfXԶQێ=2؝uh*g{PVURU %V~1d44nc]HJ`+:k(dUV,G("C"^fZ 6g?F}>?\q(l~1"GDܝ+yKMʃa ۱Np ,FĂ䠙$0O"ύ5EDRml@K_Rq&RLf+]bKlI+p:8 34FfG3ȸV5fX\l#.!Sq0U)TN#$d4@:(؞̈Cf(+w]-{^‡wݎSE'z.B%B~,IrFuǭݲq/tzfzO[vrwx_ziUnzh{>g!x3CA!s)䏳j9{e_zG멹itN퇅Axl9 oR#X0 Zӡ *AEz #L&XU;4&PL\H9MkP:q*Y" PoL7R1?ҐvT TZ,n\5X^۽p U~obkΦ/%C' $*yr1jңĔ)%YzR 5M$CH FDxDsyxG`vAbQ>xARN9l1X\@FsN'%l A6c%^ˊ\ZVN .JLI@:ml3qy8"9~P_3ٻݽ-m`Ǡ?mnڴxC T&)J P\|RL%JEi(9"#=гzFymAvCerLDJGY-ʑ%E8`c@4"Ot=i6G 8N9^{EV45(҇ 3JRaQmeIbtQi[6آġ}B݋~ و/Fod_;3:+^ehs`,BB)gFz7$,vއ%9 °#cEї]vۮhV뱳*_Dأ)"iHcgWr k ET iБ*qXYBi%G:f`$_%/W977 ]8W?e{߼xTWfXT<y8hiҿ;=JFp}(ѷװWG@fڛ,lwT_E#7d#/'3ꘒ=0`tuj=~_==̅}8YXކ7)yQd8d6\1k2)}:|^?|WWIɾ9FiyoRѷտӑ N{fI:5JĻ.B"kRgV\!7͌&^?s|O/Lh("J&h16SGs<ЀGfޤlR #{,MTUli5v;殜|a2כSπRg8#TPg6YىIy{»}zXx/ Ϭ陾+m?;=Pf qG.e6gD?]|~ : UwO`0%2 Mf=E@8+̃Kz"QzF3z (Gz=١ҷa x %c =?=go`3QU1R!*~T5~?.91qůˆ]TPp)B)E h:X0bhXn-#c(8rB'~d:JI2)mxdG_,ӜubL^o2K'谗8aɋn̡r%mvY콹Ey=nT=L:̞U>UU;٠%N0({gq AɸYwq%_0?"%³(|N^4`ЇL7|Jc@84 f]oS#M+]rg廫Vn[f+>ؖCZqvgNaK!eu_ㄗ(I] Aq'-)k% t8t-[Fr[]\Y"(ECD"N# )5EHAFGnpr2'a)*H$ON?);N&&S:`IANI@wݿʭ4|e3%6g \!se>DIW?~|3KGCΤ{ZPoZG1[˒Y2صܖ~evܷ/W+n㿵h A ~5lhXK)Bе3F-kA}'XkZE`RGSJa6>I~2;J]Z;bxhYt3 iU")]0*t640!Ja^iS" Y@SJx->cm\|ČʳPaCaXda !Ȇ7HB>lґD&_tlm »pҗk?oPPe>x>]Δl`?-jƛ8RV5/Hi_F-k?f}-h3uk>q*v%UFZ3Ղ+˜RIlmIc0RnmP&ƙAؾC[A[ Ew* ]+GеG~J(1DRp&jAi."FP@%gi(QT bZh̹e!mM{EW(gEaZU'qɮV7!{G#$.DF3[E:bh::Zʫ5e>£ 80S9-55:7a9'Ȳ@&cSݱΉںXx(▒(5ut!"EtaG"q܆ QŅjTc3卵)TPϡa9$$V8m0RS-yA)~ab□^kݹAng|{']#Ǿ֏Kwv\Kf^+/лZJf$:3}Xq QO)Z0m)~]a1rE؄ A2 ƨU1kQ:\K kY{4٢B/S44yJ(H0JrY1Ʊ`_P(kڄPkɥXőp.uXE0L*B#E$g|o"t g"pmW/ ȓo4ڦ=ƙAZP1} S:njE5c^+%@B$d )v6/YBMh+)Qԁ su fiՆ~1X> {mܙj/A6$Qs-9˝R 3|wgsX~8㚤0 B:ܙ%H%)tc Xx 5*ʢ!+n.:?q 78Ŝ9s˭ʼnuXSTV 7ogH?2A0ɵ r B($S1f4j|=#,RFXpRw8[ fc3&^R$҆nmAzմ-5NԻ%@hm]5/m` wV,{tiH=m2ě1&3azT\+,DSA8Yc4b1h#2&"}it;_AFm1{KWB'f;O,[t].}Y_(o}Fg61;P8;VD?~U†*I_M' mpQLQa2pN<J4TtU8^ô/& O[ɭz)"#IJ—z&V20b'Rwfo`_0wW X <-;³vhUuYi[IMZցgs/yosƵ.!+?_=yShꅪf2ޙLGq|U i-2j8KOɀ,PJ ?w_y9bT͸}$(S-ْg p\ sFiΣ%'ONWFy^*&ޗǙ?9˃<~<<ŻG9zp f`R$4uAkwu:FN5W7q[MC{uTզ '=gLu (JW/Iw\Ƥx~M $| +3W}qBTkQ t v9[,Dh ?MWɹHɢqtS| U̖T>1(Qc kC>85 ?j%AF=iR%#3r0>-)<a-F(CA4$(<Ş[1t#JvBMkGjN[mnygcP3";:JLQa,M*w #KV.*{sk4qD0ssDښz`kCovEo;h"A: U¹Y]X3F1)ȬPD}Yt67ݹX nfy?|r>cn..n eN)AĸpW,lfQRM8cU")&c.dΐùAZ/BzI7̀f֥[z:qXZ!|F "+ i tmc8ܥx4D|%>v7ge#+f|Q?sT}ݣtl4z8IFp hD(%0a2,Sj͇hniSu쁢0΂@F. 5%IR^qB<]Wx/KRp06wG+_O6d=[lj+`]q V$-Ju(I57ޖ@}]8BșMȔJge(CVY~0!dB=7!ޙjLH&d PA9gN)jRF$OI(BQ9H.Y,FM(" Tz!kq,8T.BgAt hg|6zy ZrN38ן[=sWaǺ8,]Fv c}cr5]0A<9Um܌{u>*u1L~Xc=4U;p&gs;o?-fz8zX!:_/z}JꔭR49o5cP1 h'bP<CCJWq1ZI qa BT&01< kPy ~ϽySU"UYM:-jR"8"|3ڬ*Mƍ>"H0;"-QVԙQARC'X@ Rf:sSd'ŷ?ߝ6ů:BY;5'qȣw^ر; fe=.U:"V5ŵE܏Ҏ7+>8;kg֟ dh+`za率 $KV)B  'zͭ>xLK^&ؐGUcYZN/yJ*K\4|ɥWW`x<D>ue{_&,SežB=PBx4*+ **SA]=Eu[³/:ّU{ {'\TOc'y O4v'҅g=0tfb繕F('y\%|bw}ߞ 4G ~Ë#ÙPJpANSBdt;2>&D/!X더+wl܏Z-wߏJس}^ ^-0N OZwT8* E Bi.IɈ﨤1oεn׃'ȃ+|]4,ygyb VdʁuhL1s/LQM$AO4ZNyAY&8JR"t%Y0"먦S^?iЧ^:"Jl: Xy9O*L;hN8 @we? {$) $ >pƙb(4Dw.Ehe`DINףYPo~1(8OM>yqzIxaF'klBI׸& <$kա~ʘ& k-Św QG=.  
֝Z0; }M۳B\JX\NjدU5X6rc<`}YA* *u9ޘ.tB$/${L%SFZZF*R)hQ2KuWal EbS/ka #kxZ଑`(pg tu:Թ*hw`!~l p:Q2[y2VsLш%nFi u+Hm72 n=Len=ݸt}vg%\U?p7Po0ޢ5Ȼ8.?1aDMR|=0gPѵ*][.;SI"\/#eLQN$Ac oNF _wZqԱ╻t<:P6&EYɃ [NPZM p@z;\@8!&u^!'V%ٳɧ}&:=k.LRfcu 8guk\+.p B͈_;nͅl 19%޻zl9vnY͸:x~a3AHKK֖\U[3 3JGrCarkп4ѬUosLV'lk6Q>̈ Kd UƛTzLA}/󬶼4ף~Y_.|_|sW/7W^9G9o^=Uq &@]o`M>k{Ӕv47iq%3Tx=?]k*mEoTy?\]g4$Vx^#0 RlU ߼u@LsѰ&@?n+ŜWR!1z@m_7`6Ji0.m̖T9H$scҬ ''0z1~v0m!~6VF$x2cD˾kݬENewSzFD劶O$FGUƲh yPi]R ZǭOQ gwG&D}|_4<#7NRțs$9> +J$yPBzCm]1O,l;o4.-IQn#uͬӌ5o+ EU/Lҫq]dNRH]i2@pP:l"C&st-Wu(g=[/`bՒHR CVeRj#GMAn5^A0q@$d6*A fݚ2LB8엗X7]m^rѷzh_{6d]˥a> ~EjQBh2HǟI$krR-\m 9I{nAo_Iӛj%e iys ,؅QKّ}cPUwo\YkD#}̓[כiL2]X84ټJKA<-<(N C#U{TywTyh3OG ze#;0Q!.zϵ1KE'VJǭ. G6|̉ wlt-ʇތ/gq=sFdD ҀJQguȰLFOm@#r"jӔ s}n%EaR`\XAzk,$K3Z3)(Tu/Y_qjsr{Kis}]X.\dn*%+3ū+q|^2hL h%02MxB5 Gϳ:qV~5^yeGonE/6F+ŶNJQ[nq#rd8?b#j$ۥvi#*Aꙍ(ep,R8\PU9S<W̎Nk|x}B> /UUD袭 [&ͣ_=a~fpݫtCf:-^חoUNEUI-4Ͱ?)6P<CQo-ԩQjgo~/q4NI.UwJݹy OSM,K+m&YACҿQydm=6IGWN8 3ѡQ˖2 I$I;Nvs=4l.2.cEj'CSs*3rlgOQruTeyvxf!NLBctDZLqVkYB!k))[łhp9Cw{ T@Nm`FGN/ӄѨZ٨;;s8,. kb4SG.Z=#LcQz"N(KYPYG8,.tdqYO.4AV)̕5D!" X j, ,P`3;" [da'2Ȟo3R7OKE 1"R!TeCsM\#&#TQD=An18=dtš̎lg7Ag:j{AjR6MBfH$!'8sgqqp)pPO&uTNo8x1t8>OlO&}O5y[Wj\\N=|Y 4ip{W闟*J 9W2Ei9 Hy)q´e)J_$vUzYΧns҈ZٓmT;-6HW:~CR smF=) bJ.>O0EI-AE ,]  r|Got'>Y@ Hd!epY c$e[ɰ#PʍJ2J#"2! -J)@ 8&v* RDОG4s% YTN}sEcOwwR`E#ɎEpR?ԁZ琉>}`ǃдd%c s]jmGub]F(f#GA<{/:.4XqWȡH *U0O`"՜b:Bh \>+'l2ʼn5bgd})@ܗGG1b2MdC͎`_i|zLt^YǍNʳnuRL=!Z2mi)BS)%@3mNvȬTȡ=e:6 ꝱ)5w,zEa#8 d{N)yO:e&S5eVMY D佖1%&Z܊ufg<!`h|bu9GaoTy+5)BAB10r`UrD3gia <* ?s~!NqY$P0[Xj3H\扚M<ލbmo`GMOZf%9Σ=/kZ *iZ&8ٺ4m)Ik\?|EɊ}KZJgSUBsTRJr%f rx OW15Ddh-R$y)&=\Gm7 ,&iʍَJ6,̦b!ό[¹D٦Čׄ',۽b'kXԼcȈJ39 Hi0(!B+<+ SO dא= JlR!`V0/2meZD"rRԑKmdb jgӎ}Q[dFmѡv`7iQGKzN3j1HRp9p6pP0v셇e01 l-M4VkN3!:-+7v'Dܲlp֢-S2֔p^*B8j57=Gn7Pp ;Z  &c>X 1H%RXhʩD9qYscn7P;Rת]evK*{8BȍMR*U92m:AM@ bxcu O˺u&[:ٞv۬:%lvvΛv;r> g7L[<.:cȥt&O]햷ˮ<,k51IHKP2Iiå:0a#O^ڶ(k]1my8>kťV ̷ NEYq?R wd~|HΏqA"]0N>.C kfj?^PPpʓ%/G&xEۧ,gCnӾUmgD6*L *Ӱέer ?x%h,Y=Bq}h~sOzҧԍ8K`oc/d~ΝF3R)?"*qqp ydM"/^Z9Tzlųg'b-.hXi-#c(8rqb6NN? OK隳-TWVDudbGLS*`RvYK@ҋPXt?`AkS/*/-esPsAd*Icb`Esb8(LFno%Nֈ).j\$_gec8'2zMTS 6CfnnsUnX9"|.ey0%&)gk|hyވ/Һ̵1F)>0W=$U$m'Ezo ]DDeT_MY;ߊNNlW[^V7rR,sfu9eKT֙8oI[{ge(J 9W2Ei9HyjN #o< wY\\rqRq%+~C:/s|n@w ɱirOMϧ?yrZʄ2a^!0yTQ͍QmQ0{vFzIru>p |%&Bۙ0cDfMvP|ϼ+3݇rZy\kZڒ-Jm|,VGwkf׌tLݖɲ7kHa[>ok`!Ja^iS& Y@SJx->cbn0u&}^)iPi 0(.-"(._ /BA^庡|f 9yu[v_Fçp8|z_KW+K\b1{bZٱ po[0[7W l6/.\Y/zT7y78?fzUЯ7߾ aN|x\igp3q7Vە7R-}$d.L9jUk%q٧p<:գ'޵qk2?ܛq~7i[ln4.mx#K pz#)ȖIQ'r8QxPx:,ZU]du}ٰ*~VllYx*y]$ѫ?POT]9x0)o>}r\xd^uW2B,$8̀t,nwYA[,=A#2Q Ns[tnH%ߙ** 0 Qc U2f1^$D2]Dn:Urq{ C* Jp2Rh$PWe30-c q]U BDYvnbع!vnbgھЌ thz:ίz:tTٶ#EWD|t5-5reə繡(rs%t`x.UKvp*+}2ƠK$U/lT.ZNE3b 3" GB΅:zL'r,6CM&n.~ Ѵu?GAi/[KVS?(8ck)UX+\PLq^h3DE.9%#1ua/!jrN0tVhf+R!)E x.Qq RJ\q@`]%T8 㓏$SD$(@ XwK9 Q æMߢ`h-,];Oo]Ӕ^Mɻ[\RsVt\}`MW \a<>p|12h(JSo $h--\К'kz{1${.FdoONg ?zލj2M!So&1lA_s8 _s/տg~/G+7_=\կ) {taAueZP'ٯnux8aVK_QݤwaF4);Ϟ~<*vo{s n.ϣz~KHO,^^ׯL*TjzL ~|<o*3&_>Vo.7H_rosSß|=kS); sw[ޅJE%b4s{^43NZ^zWzidFO"'c5~GZyZnr]6z^ڌVzϪ,[ύ b7ͤeeORd8Q7d] ԙud|Lv({T8##x|O̚7;Jج$.w )M^y?ɞϢ3KY^(hRdS6Ī}Aíҳ&n>nݗѭa]hּGOeOl]7amFxi yɬʮG fp#T^Hkxw e>%aH.,y/oKBZ^"Dț!az}J&>{,̀9[H< @¯¹}w9NËdRP tb/~nJ&v4ոpb L~"2H$~,zd0ʕ]کr{YPu$2]}XnQ}'$ &#7w&=-2I8h麔TK8>}yVq<K ʁL7M%:NS 4 MvHLeMӪ@4r+[SRLlb^XG!>_ 2@t8%yS r`*/ZH8h!QZ/4n@fг;tղDQ0> dC*jڗ8|h>jgR ̤o&LBuƺ苲!rm칧5W8rUq$UA0 Uܐ-:Td*J:C-cM|1{phZgY'9@('Jcd:0!KA>rgUC+7Ɠ3_ilK!-ظ}gmƋ]A!z@HYNxIҁ>)w ;׊KY> _kŭy1x٫#Ũ8^ǿC8vq˪#׶(m7CDt(zU }EV_7AAkCP'A,Ss奔x3a`\.=ww2 th)\ pkni:]|쵲2aDD&8 jUTsc%bD]9;8 f3 8>f< )O ᜧl1xo#cgmI 9Zr;AfR_Ia&Ǜtɻ3Y|M? 
[iR I%1C \壼L%+n=:?Y 78Ŝ9s˭ʼnuXTLXNjZ1,\(wp)B0cF@X1b"i)YwY {=2Md'@f*ϗUö;@POaCۼjiuW]FNKrFM$&gF ,fv .>严?g\.A9H$$XM pu-3卵)TE !_N/Tj4vQ)ıH1-۪F:h?wׇݍM=\%zasxʷbMT4kz8x!`Kt\3aNpu>,8˨tΦ[F‚b0!S&aJ;f2A@KȳJklirJ!_)P rU_pJQ GQnX:yÑT 82nX)6  Z6!:HDͱ#2\*밊a,%V\TGQ2xaKtXT S̆n7:5ƙAZP1} u HbQzMJI,( e;5_qb߈퀽*%T m5AqjA(Ҥ ǗlkD8,2֘)m 5^idP$m"I$cBK=J:v ${ %Hl gR(wҋg2NW|0.'1@E#6tݦȯ15lq]s'/8>?^T Φ`\ad>QH#)DN}Ե9yQHoѩw9 Fn3zf  OF#OĹ݂Y_ϦE޶ȵ>7rDhi')b,(0, ZXXK%b!]}审P ݰ<%{ ?s+t6~5%9nZ;Ik8Ԙ"Kt5rnWIIzʉaftgTftft/fr 2R)")]JY#JNF!S ಧ֘* :"UgPɻ@E"X81i+Ĺa!EȋU>ly+M{춇o=QwƞQw[u_02&' %(^J+c?4ZKj9ZC#%` 1و8$>u3%s ,Q2SrJ'UZi`3–"=g4~'؍k) K%FIP-Fl҉)j,F(Exҹv.UGhh(#hLz*:k 2d*`=:pk ̢26ֻ?i.paE$D 3ƹiALٟ#ADnqe2\0Yg`ȀPJmZtN2,a",yMc;S{<֑,{z`ZOٵ∆^@Vsy;N˫GeNڝyўlq<|]#œǓ|<~wp}1QD6lDy~'=ɁڒT׷t6,oN,XF|6'M/jU7zmn{WW[\HiXm:gs_yTfĿghڭ|Cᡚt'Q:?9/~*?pϗxqOo@?Ш'Rqhv]p v/woZMS{WF܅cjq\5B v?LҏyG|zrZG!ebr,I-$9I/e<#v-jpa]ݿ]mW ?ORB,am?ո-1fqcKTCfAVW`h>m c2p+(E EedϏ~NOJOvPg=рLXzYCY' !AĤ@!{ّ'lZS󁏐A= bΕIwNK+ Ӈ 7qEW ˖l.L PU8;Q}[;1RM'.ME9'tK$[mfl1o4VcFbfw.6P`")Ec%m94A4іz 䚅& uʞ^N< RΩ(s\xipIQS$)SJJ&nyUi=V~ 9U2x1) )9KsNx6ʨ8wL8eărF|Nq8e 9 *A[I %+brQYA!j(I ^ݙ& .V貏(VKZPllĹPݬ(&XJ9'~V*&3bȪwbv&:nI#zݼA:hi鮭\I%W_*JU3&(x ,hVi0NZTBQ[S!U$YXLʮlds`L i&'U3VvXT4㾾PUj w[dg;ݞW>?ey;ӏǧEd͐E <Ȓ@=zDB;s$^[ݥK2P7`'".xqBe2)2'q9bhlZt6B̾xjq_+{m=xޤJI=א,Bi@DǣLޙ(%!꨼k4u'**&)B&d$EQ!_mlRV "%_i5qnmP}-/~# q7 LD TINq e![  =hd[2m$ƨ`.kD2gB JԨ)Sl"i.Kj&#~j?!}@~q,Osմ~2e8`Iw[eXHDW9񐔐҃"0z$&5Ň}մ^}?|'4|'y'/9QܜإQَwH(!$[M`S >2L5*0H &c[]Z 7+i h TJ%у 21X JaH…S$ Kpd\YRQK(68ǹ(c&(F0B,֞ &1WeqN Ư¾[ˠygȍgN QQ+TL$Wg&&tL9D *N\8,宪Wɂ7[gJMk2x FFFYʖLģ SY(H*'yȞl]pXd 4qq8 I fH_lgĹPBvphrs&0'Ff]rpITB)N*Ra'0jE~?Gߣ \&Z(cfHeXQ}ͥRdkԪx8 %g_4@Y"쁕z b%Y+v,lR&iA$Qq􊈷N|%D!&0!X˔ [H,+@pV/5JD8 Bs0hK{gɼswm;RwW@:ޛ .Iv cTHʶÇ((-1`[ U}NO=%%}@E=N> K% 76fϩ7 ՝6=J=F?n+ssezsuߩ砅gdH`٘"0\i躹*Rj֛h|N1WE/Wꙫ"]7W$%0ݛhlem3Z z7I\Ͳn]~5aLc_u0FM;4| c\/j Kv>ڎvfn51)B-e;Gq0('0W-ʥi?E+7܀p_3+.Mtz`#Ml2J,3핏19:zǬItT\WS߶gq(֟`~xvF- W<^ԚU_odNX6cn00xm,y,sfyNBXB>*NfD1DgxtaO|gBm3k7jZc(" 1ψ;+J\YVu) %3Vi,&Fw6KdD`ʍSFM{,$ .d9-v1DC1e !DByy;QKK4܅*j'QR1nW PNi :`9'^m.XDP`iSn|L.CPĭ $n"S2]jg=wEgZ|/Գhvb~,rGFGڿnV[\<߭~,*@1&4-ݘr$󎸯GiZrQYNxȬnטAAA]BJy,<Ї!I@(s)!YڥsQ|V}-QWE;60%Ýzq-RBnVigZu_pŶ*gi-WM^^wM^?Jkz ]C5F'8sJC|_4-fQ%"Zp~28rjϭ8gvo:5(ۣ|"S@_'F-pgP)1,T l ^:+T>4z5w)q!X,鏘@8 S(93WgO^]J^liDwN(8nB!Tb`-TRXK)zPQk6:^ .Ҧhm| 55fP馩ODFcZ% Agm8:Ƣ h5iDrI,QΎ;!]]Pp8-Ԭ([{U\3, YQFƢ R~N8x_O鱶})hkXΌ)ck7^$O\b|I"'#H9DHwNԻ( &~ ~3ĥ|@qug?|χ8jZ#6Wˉq&[?4%9/ MEfg.*g0eu={jr[~L<M<`npLYqn )&8;8 67S?8mVŒCZY9X]`,ym72FnZ/h@h~TVQ[@ޭGI)B.6}֐,GFo絖իG\^޻cauЊXwm~?8]Bv~7ҧ݃嘑~m}{dMpE͌\ s> y4<>Y-˱8(67^(Ev-jFn۝l.IcOZE#XFb5г6MUnnu]+ VgUGZHiXdȞ~A9?|0Y ̎jsC_axvBO7}G|D ro~5:@.,{|}M/k4Mכ޲aj 0vAt8~7b,}R^\"5 INt9 \,al͉?+qNؚC~@`׹pBgWȑ-<2gDrί0똭f󀳲ۇcz܁5~؟3zA8LB,fed%a=A\񗝇S7;q29;I9lRg#_4g1H԰hYEQzs\I(=vx_!:6`8nIZ[%m&Id6JYgSϺN aa$`ssQ>"FK5& .\R[@,J,}R^je/ v[I{DMffDI@+\A09A@J!ReJ+BΙm  `e~rqfTQ=` hGr_CYXx薦Y5dFkDq[9$qO/m`x6/~ƍh\ҴXTpCZ: V5x;O_h>rm6(v~B`$AR$u  %LW^( ^#;@@ZHk6#WAA=Ƙ 4.TsĹ]B] b'٥;heVhO RI|,B!"hX`Y8)%'MrO䙎 GUP Y抅 |[f۬)-?},*/cNb>Ĵ\y>ˇǍ06PYi8:CAcA6cM0%afKN2%λ:I"yOYNnp bp3#s4Td\ yVI@kBˆSRR0p"_#&.2\rrVF3&30tr(y:r,zmYfJ$$1e`2  zcN h#i( I Qj~sEmH2%/?Tj=D jyI}̄ʊZM=>A1R~8vӋN~T['tD%5\?w<^^w`i6}759o 'H QpXGe Nq_5eˈ(;Dq})Sr+bcؔZqFeb0H$Ap 0BB2g($^2XobAeZbs`IaupmEFُ.NjpZgkZ+.|;\\3$w7Í2 |#q:mrX&sʘAP@ʀ=o[ӎPVf<ns9b*6oﳫ#ݐ0p(mvc)eY,nʕ&g\yr iOVg9x|vװ=:\#=\zI)/5C Rc$@* QN!,qApV\ض]uۍ廥EV솪Yxs!׊6IyP<$"T'591k>s(]|*W9ltVfsεYT%s(Ym_Vʫ[[ԼPr>$wGy6:c*PqLKueWzM@j֟_6pjL\.OzaM;x&{au=T*Z͇ ~-F\G#)iR>`﹡(rpL#2Zl`TY)TwEo1;R5I~?OW$M Q${Lb^ب\$2]T$45 58p.l9R1",DDꥦӞ2f` Xy$RD[^hM-W&ָ|4uYLJD=>XjXM !rYSQ, ʢecg`l- k  s&E"D1NHf" "$]޿08U$hTFJKig{7b.0`"{&S,I} (08)wK9 Q Ƶ!5q6z!jgR~wX VV`ؼ,QZ^g9 LsE 
̥.J[6I$>O.D`hwP(<z:ސG"ς# <%v:ϼ'd (K&:p)4F` :iy]7tqSUqtGJ* o"`3ž #Pd=ZF}k~ L- &mfkl/iWVgC-ξwt2j8cJ @!+FW1XVcr#lFƿw96,!Ta*Ҡ#UsLahKt 2$ccxcckvd='p6'oGrX_{h?M͌O7@'Eh|/|H5+;} 7?]>!Aˇ_${OJ_ӡg~S/RƫߔYӣO_y^',?z3tw~}~G%e/F_`^HfIɥ)ͧͫ̔~gs_^kF~siҥ/WSd *J /QRmu )@0[y);43MZWQ5=4XrADPrA٘0 0${vOu˦gd>+|*U,[/aY vie ?@$$~N'Pf!kgSr~D٣Yzqf>ݬ|g>V~>0' ^M_gC,SD6 L *"~x%_kOk_znK:G9zQ~lTJ_~ S7D.9}m7*~) H!PKZå'2 L?v0_ޖ^Z#DMs;?~_KUd{>{4@@9 ˭e$w EGNR`g?6,6l[_]'ב2M5ŀ%#iRS(Xt?`N+kS3-ew3T .@'ϕgˁKY{ KH0Q0JzHv|6iS<75]JI) >E{pCb@0K <Ȑ醺DSR"fݬKzԭ n``JMRd!db'7χ¼̵1F$Ay905$UԐ$Ej7L3SrYODղHDR> b7}w~<9V;|y5ų(N-n-iq"\ecȆ0gAG[I`RIu7!^*YwOXXNBmQEQ\mc ~♟DG+;a4a, N 5Aa-EbܙtXnɞ5%|~pd@[)SB'F cѹAq'-101V@/i$LG 5542ݓ4p½Ǫ䊡\х Lf!AgҒ$$;#X,W5ry8(~"O^ɛMуKf |0:ĕ˟n&+mfRj=c'!I&[2qVLK̢qI;ǟYEM2z[х멤>\d);k! Ps9^@U.Q1@w΂^-K|VJ;h݂uԑ|'7Xtiqa+'?Nv%Eqdt0vc e & @]vs~,Tcx0>ʥ)f_Jf ³WVwOŠa+u u[vFOܿqi ~-l@VLK\d1{bZ#*oi\seQjs~:m-ix`,R*g er a)$Bրwyэz%"f0Q Ns\!)b Tr朶E Uϥ[9,KC5 f;sn*Tשx555dzu/bkj#3"-j;vtOHi@ѕyUQt)= BiYΨѹ 9ALn3y,8˨S-8HXG"\JlB O^c*5( BzqyXiM-=?S=)D#Ea2BWVZ\-r@=U!Ƙct<귱g<@rFϵ,wJÃ&nP+u) h؃Ấ!Z烶튞uNgWL'_YTQ{TXQoRtLPөGsmøO'q10 f1grbqb#Vqb=xv>!,\(wp)B8cF@1b"i)WY_׸q5;5IK^xäAt?_/LjE Q{z˅ճfJrPh%i˄ F TDta~0Z*6pó ) _St8>,<P֒:dW#d z>E\hY\I 6GۄʯT;9ק\K[F߂Jw }OAH1TJLw4L{5T;Ά6%]̮}'pK{BPAr3u*p2K|)ЖT%uhQ6Vf:aj&)Yo>AIeT-Q OYJGT쑯Y^9_fd0{Q"8;|F5]/L$ s3;Y?cMlIcٙ$bKeGembHb["Uդz[Y745ҀeX4aT "OB҉ӾNg5oA&WEVmw$[rcoWHjow^^/GY츔*4ζZț+reG&~Y|̻Ta:q!}L9zJREx8K-0|7Li Cn(SLpƁ ;|-Rn:Mga:FfVsK%fۅ[-.YVd紟eѬz'Wܘņ}H(Ⱦmzutq69Z %~tq:= EM^=U,ҥy֊ZBڢWPjϠ ni~`v{f_ͮ^depBfQv0gpOOfc zl6h~h7ߜbpO52%ږPnl]3b}3ܮmfy%Ei4nm`ŲGzzlxqWmU[VEr&t4,ic|^N~A?|0|eC㡊Ŀgfa89}{ӛ?9~F"rNv_`gD[ U|MsUU`v+6L7 P]~aNgogrٞCU了 ^}$ijG廘O ^]5vd8Մ![:5ǫ} c#'Qj^d#@_q"9(*BN:D@TGwKӪϗ>=Ayoodu6Uo#'ӋT0=O D-\K9Zi˟ :,a4H \SFdM ڦvv1w-,c6 BJ-u,E!2$̼e28CmqCSfG F JJ:2\{.1EZC&u&0l_/<IЅLJ78jqW~7mJ {Ȯ¸7뙶FH# E}, ͚V'w]Hh"o(} {i7VO[ߪƠ{5n+moɿQJq\85b}̣[Gƣɦ gWpt,ɤkR QUqR$;/EQ:C(#Jݜ?$'dr^0-P\ "S 1cd{BQd3 6 (eL1 Q[@2V "9T`ܻАsp|n#i>?΅mgm#U#tyg!6dv:|J筘NR0Jc5a .5t&)p 1 Nc ^sQn58XNxgU+kw Йdeparr\k!?GJs]3w%g}z|>_uAk{_/;Sgy_Q=g,{7G h_ @9Kο%2;)ZH Յ|$R܅s!U\H 2GK @x%I3/2Af *MeB]Jmv7%P't 6p|y2:ɸ\5X)*Ĺr r.2mBZϭw;q>I0~\AvO cCfRal0V coٹ2xyÞT-?Aiݮ*qߗJEZ^HYȿJŻHv^?\E-SɩFsB I.(9h|Cc!𘲆PS٤w|_~#(~twB=@DVFIŸq!HxBYxuhuΉ3 Yգov[N> [ZC·aڬX.jwc{\s^OoU2MJnL\9CyzDv%Mc tCf5~g&ʼ6;Gwn4eLoOU-M5;j7}vGm3n=VJ* )bW$.p͸lARKdgT!$w餔褔u}s:f.8fs:ֳPG/1Qro\0w-/a2̻1 !z#B߃@*[sC~KxI{0bٮL2@+\ZApqZ'"+)xC D5q N *lӦ2<5/5)@iZ_%b>T@w_?.k\wڄѰѴLI8D44ho<0S4 Bs;# ɭA sf2`FCk#*)$1(К0SԩSr))YoM"_#&.2\xYpVzUBu;F!ȝ&O |@?)?TE-L[iĔ$: L`Gޘ(<'Qh44@.TQY" 6$ ϒb*u CBOJjS +jgl(&XJGsB?+m#WJß0`,^2/ &3 h-KZx[CeIg0dd+dGӏeT҆ikhk%I^"Fa1V[JpRvCUιtV@ QIA2Xp؎A).h*G'ZSEԲ_P2zE"DOp)tQ[M*Ffd j0EAH-[#gdl& []e!oYNU)J}2n^) [ &WWF} gs1 r7aC H2RE ŠO^u(!){D%$ItY$R: i]IUG[lv8K*HָcW-Zڢ]#&Q"WQ.Fi7Z:`sfpJCCȅ4*hȀ 1- A ('8=r.4m;tFfyX(u$E"JDٲDD$@%NSʥ ތ豒z$8rd21 pP a8"S!!Ɓ3uS/IXobAe:bsIbuTdmY"^~ 0$!u%Ŗ=b'F p uN82f5b.2O%X}CqNPV}<܃'js5b*6_#ݐ0(so6QAپ빩3TY|.Z0{T0!5r.g֋rHyJ3{CʵfE.Zč]p>eo) S-K)-C__aee7{ЃNj¶ 4K6MS ;9\?llG(_NkWQnVR& 1A}Ep TQ͍Qm\:vF6vI9hƾ-dZdNy(K$QD١O2X'j=JI?`{Rro|Ϝ `Gi~>wga ȪH' &uD9hkcʚ]W_ԍ}d0]-1lJa^i5^ wE%K~trk~t}r L[ Y .?A6z _P:βշa )^cirn۴D5lm'5N{ٜy5ni]Q onb0FaX3GٓQ:~z< a29ӣk5g<'|1U#) Z!\YRD's~6mDix`,8T*g er@)$ݝ`ؿؿ=k?B`"ɮ=DbJ%tJ]OrAc9DQ%HY'm>X*=_),P–«r@">P ca:r0p a,3݋0!%z=^FMgLrJiaj#B+hZ%Z[}OCڇ@T d 1@u)@&K@g7o7_GzM8x96O 0Oa0KVOo6[˛cF~bA!B('ΕЁ 9ӿv?o;+Xσ X3*Wƚs]ȱ (+M;1~CQ E"(` yL"LSa4 ),H4%MVADbO 8.(`A)vײu(LzInhڅo=kI y(#0Sjr d.&Y 9.jep&ٜzh Bjް7nxotyjpt}NE(,Ϫ Wfr%ƞ8zM9kݯnff냜E<29?K],tp+4X聎8⚶oqpwM}CNSA5uvI)#tY{dcɄ*녍EK"SީTxF,נ3" #l BHZ=؛͞wŦtHG;ɭ(K U2Aj]o=jyzZ8;66Ƴ8ck)UX+\PLq^h3)2!dWhIH׽Y; { 
2"FkͨW>8B0R%\J84〼CaKp3"|'gIrQaqR"!,s$k;fklaYe$y|I,5w{kӛ)JǕtj?yYw OX^ꢡ+M5D(BԎx j6z Lv3O?N4N|oG"ς# 8%t:ϼ' (K&:`)4F :ӊIpݸ~k}!8?x^k%B y jk"3ž #@CqfRtUd-*hX6CC&PԶf׳ M|'%ԆO t (ξw1~GN%zTX#g(PEZHTq̍2%mJ ,963ȀX&A.G>g[zOcYG5͓T?9=}2~?z~5]!SE"4|ѿ~Ʈ >p6></o`9ԯ+ >{Y?mWjCϲ?y#o "hdQnꗟ&'fz1+yoi^{ۛ274vKE({u1^~* T8PӫdRl 8#{w{?*s&_>m\W7H}/`AMz S_~y;oc1;Ksw[ޅJEb4Q_h朴4/[W`E$*|bl>.hC!{1u&eӪe<8ze+ 7>dI \6I /;=;%3`hq2;"#')`d?Ɖ]rxLJJ_.PTdbLS(`RzUKh] Ia*Wvi:^YZ;fI@nTݢN6dILFn$V1LzLv|1i S<'7-]r _7, Dyr mS' ol&;}iiU XjVJDŒ%_~<ɍe/stLQ$(7  HifJ =߅GIW-ˠzNKDv{;:=;خ؟V׹|վ0IL>홄ueWj햝8ӚFe}`g*77arF¹ymZmM*t7W[:M|3{thZgىϛ Zwhf  Y12a@skZ% 3YɁ4%l~trGAd).RV#^Rt\ Aq'-101zā.0%qvkSU޻'ξCvq˪G䚢 хy I~Hkmyy>ABa9-55:7 9A$d!4(>D\ |@.qK.~ΚӅ 9v$m`a""'^q□jklh﹫4%==0 tD..\X/͛]/lkF"̳1 1Ї`r ծ%Q@_؄ A@Qc(,k zqyZi-?܄ RW&Tz2_pPR8Rf )ʙ7I bc)㆕`>ġ EhBmDjU RYU4c,t""md^zmmI 9Zr;Af?I;&7z Ҭwi7Yg]^k7/,zVATJjIX‹p$«$M/ץxo3y70=xkBB ~3q6nb|F-Cw/ < Dآb 9sTyG'%:zVxP@xE,ZU]`u-S|X;8-v^8 P;-u)o{\V6~RzDե٨(w`R @/l\f^uL,Ig93qVuuO'lo v2Z+jrrm2ac\g\3tnՠrb#Vթ߇ XWlKA-4A0ɵ r B($@dh՞k:FY^14wjF{=d1Hwq d|Pjؖ-vN3mQ3h;H҅}_3a]Ms9+rz==1{9v8@bDiDno(J-J2Ie!f {@eLZcRU.t2BCŴښ_(ƝW4W?ݴ7ޯy|9g1l5e t*i4:6DSpgSg,΍O.ϓr _FP;4^ᆜcb'] vXJ9t!?:W*LոwFPc5DYhi c$ R\%}{jbsi꥝!/VAb<0 o JP&08*6γ3KVƚ|Ey[sG.dB U%ri*P*)тH.ɑ:;^|W'M "W1 XV!ӔbSF*%@@Iڀs3~SF2RavTVS-_R`+D9DvFߴ9+?N\Y=UrCH{hm[* mk, Bhʀ~u {g Rhttjq.Aa} ŲjIQD q9ۻλMw˅qeRLV4+j];/!j-ĪTf*" V}j+N-1d\//k7), W6v_|J g4Ǡs*ȕC1THѩ`ך,!m.My[z}7 zۧ<_:rfk(-v7/oLGkتzB||2އz0U5~P!,!.Mq<;cܗb͈M'Ǒ?);JBN1Vh[l2r]ty%۞е2Ї9*cɶZ)wNCI)jYQ 7䕩6EQ)#)b8LbMR YʤZ!qzl՝=V yݕ9w-kd;8ŒY+&fBZ]wf *d>F GlJArdL(9nDҍB"` i=$lmnų:6.uXIZSesJ -% Us$q& L @u~cn?ȹiy~nm8Dkal>N0wf. b&Bxk^NY3\NHhCalk޺4 <5*\SדYύ Ğٗ@ַ2:+l :P XsF f nZ B6^Zoz!gv`u) 'gEB,mF.@kƚ乛l&_r90H|1508cˬP 3fNchjdhZݠV>?xēnhUTB|͚d*jKrM9FLUKjj F/!`S8F@&`?d֐BP6$Uk8I `%td UA4ENҪw>#"}k-}E^b,Dteη)&vimԹ֪3NemBh: b,B;8 aLGl~ayʰ>[P^9w0ƽLnjS`7v`H x4P9A  <*'!avr=B۪gnYtwYJb`-RړVf@+,RL.iT%yTY'!71X'_}( J*r)yy[u~6E8.(P"p-B >bn֖[/9@Ŭ blu?ꡣ!gjbY>:2z2C@0ƚdؼfc B8r$5:$="$= ԩ"iS2%Ah3!\5GU;aXJ` tW陉V0" iEeR^ PJ+hkQg[>nu 3c'm Ԓ{rgG֝$}?[N ,fs:UG7;3cHػld+zݽ!6yyؙ!/0R'K!:_*Jר|:\5z+ac<)ge8Vq T hێPۉy_ RS`g(-sƤ q "y.N9CJV R#cane mXhXpӣvo2ޱ{eup) x"b׵n:::t4{~ g؊VޚR"'" 9Z&h&!q1薤 (ċA F>]y Q`RFiu, bt5sC漏^ݹ?by٢cݬcߨ:Gm7E eB 6I;(AgcV`r E-첥~3 y)X* {5!;:d'Qb|AT[ _:ux$-0%"v}#xPIX9V06c)uZH5Pe2'('8X" ynyeG][I%)ߦز0i@¬SޥzuU'a\cf%;mTF JN( 9*TA$0d ja".%v]7o BF{✏c䃴qF >TGsOeŪIpS e?:!jF.ѩAwHzLT2Y EFGV){Lzܳ϶f]jSaȔQ$ qeN1*ks1:M؊Pۗ#!j2fckI3XkI-\:6RHy >1n6K uwI73<)*sau7זs2Dp_ &RŜKcy`d,1CϬc3EeIqQ4C<}QV[cMRu$Ѡ M>qHSINm8y1@b1O#̃z(gսn"'GfGF)e3_Q+@I74JhýfN$Tn76 ݠ2X7U-cvW @RF ~ җ qe3̧_|42/0F %Z'$!25X)}#v4^X?m]9p{5MS7_ X ./ 9`vm\.6mi5Dpv?| $/@%!MF>tB"F."zwy9JO/:]>t'`} =i^O0z~9qMϦ讲?gzïqfR"b ,4?jW1ʢ4NxO\ĉx=`Dp[~ p=_`r[&%3'Fty}UAn_G7=94ɵGð :z'DUK[CyyXJx4rj)m`2ѿ)?f`>![暄)y2٭-ٯ%z:[ +3ۺU&W6PE;ޡz[_$üT-ڷѫjS~^e t9(xi"ߵ%e?^{)| mN@/Љ77 Hkk6S/ф #!̩TX/c!22 Lt:a2sB#FH%eg_\^&=tSqs3xKnV/l=W}PI],b5xL6˾Yq?N  Ds΄R-q/T zԥ)n:[{2j]G(C)`7]GB"{rmgpcY^8΍VCzyb2fԚ$BTg>2xY拣`qíMħRp0{b6!73Ir z; ˩T) s$(?j)(>'^3k}3TDx`;2'}[շqq+R:kkfףy(Z z~Ԃv{<_C#"I%O ~r"&.3*2:,NLR[{ٶraM˺+jޯ=qs|4O)i9ͧ#,^]E߼|L?M,!#n7]=*s<5>Z^We- 톮u}4#iZVHִJg¡iT4nt{%@/i+<+.3#3h׳6˜+)_~p-;{a.ՁPҊu0.rdp@oi_,#6Χ_r7,R9 Zp#G= 4Bˢ:żȫx9ý E %ۭ5^[TT>,|wfxfq9  g/ Ye@{͗*>+'1<\>` GYTNGk$L[חl5aXz!20 Ad?&(^smueVk>/y_j/n[˚?hBa eeYLR WqŊzp&3Íbq*;2W"R-mW](Cffzrɸ&{MYC})@[S3\@Y^ʥ\Zм]Hv+})C]xd\qZ~AIuo::NVh}lvߠϑ?D/ 樌+x}l ͱve55ʁֻ}kf<)WM87+$_Ct j2u@te)'$u{˂YCیR;v=* =vU<6]ՃqڇV]U=VAWv=P]!`+{mzhi;]!J;:AbH+lm0tpet(9+k+-d}*#"L&xR춈fjM`FM04pM#ZzFFu4}4M5>B8_.Y ~q]Rta0Vax6Ւ$g̳$[-Eb-?o {H pZ+I(s-=V\K=vs εCZpFmѡ=Q=S+ !4BLBW~ Q ҕ̒^p@ GfvuteTDWX` < 4]"]Yn<n+Ѯm+Di:o؎]Y`.[:ow=ꡬ# b5utu߮"9BFCWתP 
ZNWututń0,$ DWP ZbNWN8QV3E25r`r5FѤݹ5!&S0db; HվjuxQuTcuzR<[py-R`3YFֶ88;ALH 0•6BJReےttus+,á+vhm뗾h ]Tـ p `+@I+DXGW'HWZZABB&Mky(th%mv(9(j bp9 m׮ee~teBDW` `A@+Me%[mGW߱(KVΫ^5G:.J۲,]T3ւj4B>!ZzЎND [ ]\ztR>t(銃f]ںtRD29pWIK-wE%+'_v(g!ZWگc `%1!VvQ1cM  .%&B:AJ%::AF ^ =v~=m> t2-H] U4B vut-$|D0tpu0Nńg>]!\v>"Z]!JHW,+>o.,B;Rt;]^ cvׂ׮GJ'UhڕAWv=#r]!`+ˎ]-m+DeGW'HW(v  -=!uPDWZW6ZBG=CZFh6T`jX6;䍩㥖Vo&Z-9_NN[eVϪ)8]϶1T#Jd`X0= g?gm; ƙ99B2o٩Cƴ%k)L]ڀ p fњ!Jԝ"]iGx4JW:UV߱Q]5kr_+mJڦ|n\?((nkvwWh'z7şó2ֺ8}VHtq#>9/pp?/Mgr_*\y۟>n!i0^W'hJkhi;uz/e7~k=QQ*Rw5zz܍P,;]eC6jewYjQ ƪ CH0T;>\u;㉕LT'; SMV3Ғ`?4țήj|몢 TĜHzblҩL>5s2=@JD[KBuP!UCc2"H0jWJ=)|n-6pxj!jzԬnn'J6omdRSLj5b{,3k mqcùk3d)KNI9_}*P&QG1Z8d1k(W8vڰݳߒwPM:Nr*0!?rih*˩|/d1;[_(0N(P bQEL~.mۋT? >BXƑȃcFgdɺ5>hԜI4VU!ZJj[K9JJVcP9\VT_QڷbK$1)P;RcB5~Xk ҋJSh#>9* X -dQU6ҠBb1Cfզm]"FU$X'rij¥p0` V]|: )ܔ}U\` `֑%E]htGjO` ݥf iy _ 5PbP\H܎m }C*YȩՏU?/AZź(7YK1eBTT4X!/~g.O! =X}+tmm,z4%>by1ρ 7DRx З9tLI3wk ) T2{6o6]N7y:[iN7n8d-3* ԭwH7ۭlIqiѣll$ན*٢bqԶaXSc5Hs-'Ѯ b PNF~;=6iV{ұRpݰ(a!/[tmM1Wts]`7"Vh:0ўg]r%5  ,0eR 4Č.mAz|BA( ==`-\߼]oa; Vc';h4G6J1N>iH&-֓TyŴ*ax`R;Z[T1"@XaFb1;, ;A lU(?<6*R 18c@sMCw+7+3ǚAF %_6Ld4d, (R؅s4= }~sP׮G#+,57[A[ܙ1x dmAi YX; Z6\a-li`I}!?Д n FI‰45Qp2\2[ 1xF=Ap*N6fTS. N8 bC1+eq˵'dpi⹛H_`m.?t+ 358F߳ ՠk\y^7Rݵa<wQk\kBX%gaNW0.&#Aej5䆺ؽ۫CVjK[~:KvM)/ANn/l\n6`Bƛ^==NVnW'oސIh~3_eI]wڞ]}{ϓoVœ./±]߶cۮ>SXϷ5_r&uN*+M'WĊÂR_ΗW A/%9ƣ2Pz%x)O@߮"I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$vKq2Q$b@zxţO|}LM$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@zI Ѕ%%Y@ZNpZLh4:$P%IV@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $IrXaII gPI.$v J%&HH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ r@]Za_V~>]^^\}AqUټ?~E%4v^Bp铩{~I߸a1t5Zh?v(n>rk5.jߜ8|ku4Z]= -7? >2O++tܩ'xDWn9t5j>Z@ +|`XՀkRj tt +6v߯Ae7Ky=.F͞|g'd ?ӫN'OEvԊ<ܾ*HhZ3 7st@ծWWowRXy}o 01Xmα@_ Z盫izsXyʯ+kik;I ^^',ߚ{Lpe Ӌ tsSj{X ;]=MxˏbJ뮛^Vu5N>_sdǼ 1^hVc=/GcCWnK+E>v(^ ]9kË-@Go>~=]DK+K+vWwEWYv@^] QN8D ֪Kr(hJ#%UTZ]ERdb p5/T@ձ@b#޽V;OԷ6OKߘ|;(=sXDW-\CK@ +M* CX ]SVc +V!:khM"{k695ԵO8aA4x14=bhzG*JfHc(8Y3oZll6޵$" =`(a`dn`I &"K=8&%%["c&bTU_UW7Yt$> /4WO^1nfKCf1crHҩX/|1 V92o`gs>g燵@>5q)XNPm.E L#2fB*Ӂz9`8vn^9w#(6cFL6c#Ko-R`ޞw+I[V7KKy^ '6+,nJpikUBxUBygDV+uWV7~PJ\3W\I1B-2W hJpn\=Cs(}U,ukU[î-wJ!:s ͕Bk"s?[c\.bZP{BcC/jN x\ydscŮvB)v\\;\V+jJp5mX",;s a\q"s51W O#@6\%M;<3WOb(Қ4vɰJ=aρb%۸ov򶼠EnНڥ)%-$$5cJ:9r Ʊ$E*V5 *(%*3W\qΐF-2WSs2sЊƯ%w+\)عݻբ P6nog\In֘Wf9?U*zJ'>Ps8 m1W`UBɺh2W 0g1W hM*m>J(u+Ы>T`, ##ŮvCW`Tgz,1aE SZccOj7zN(azH&PU,QkUD[7\%M{QIg\Q'7}RH| }DEh7oE١T{L[  MDZfb6zUvrWw4՗w#ۢ[`UڜVŅOxpe4)@6…leB71Tx|t%#2\*밊a,%V5S*%dG(o"h9?@B^傑%cUcG6X^S|Z)"ӱ4evVxPgI Tx=Y}qɽ?\qЮcۻAS^A fTzi'eș(E?{6|Bni-60Wh}*:ʔJYq{ Dى#Odt%+'ORAѬZ(0^UW Oz_8'K=bE-[F¼wiFChaGF-S "˓4y@8PJP50Ť5 .!mIHh P2%kqo%:,OsR^淡s;߽lbl>4|WF&֭sܔ5>n>wr'[^gSr75nٿ4LYp07 0smRh:N}p^vUkL,.PHbh՞:#,RFXp4gȳ6Jw4Mmn| ZuJR|Set?q)LomU QP=XG NIr`V)5eƨ-P0&fL{)$8ytGE ߣfCmz@?EƳsfSi4O/Z##&cK"Lj N;N.,RλdIv>b=|l%L;媙h 08-@]t+/ JCpo\kz;KG*#R"%j"9̰4Ҕp0AP4@2AjHQ{pc9 8k LVm^"B=a`E S5"8'BchHN"]sr#փz8oLY)TV2DO@P։(% ]'"լH®!;+- 1D:;Q|zO R i3[Qb%(&$?(#.{})F1b8EHKaޛ|3B 縌Y>2. }b0޻wC.;y)?V"$) y~Zh$2z&7$^i۲ ꥌpLL|4뻭*Wk$l4 0pMra}FOS~uw~oXϖNl //q ?}KuVE jLدKޤX4cRRS{Qp\͇?2}y7@A>7YOj& ChDp;m1joU5UvkI_r_^Y1tr~V6Րs=8L$$W9yyh[CTc-B:bCܮt>7! 
8~D`S\8?gzT}{}Cr#:J= d>cQoq4B !EF)ZԸQ9JOqXY,WM=FI{.@uu GGւ)" _ǂQ.CnaY/.ߍYFjuayۂ]f}O9dy|rM^0'r9Lkejv9 f 1 dɨH)h,C+t}*:ƒ[_}S"S  *\r,.HFR`d5ALE{ >}XӞK)b΂9#5Ӥ6dZ|uhkp!c̬>a$\΅|$R4܅s!E\H2z K0 t YB0z(B`T".!I: Xv1wa|YB7ڂqa[pՄ;D/haMb?YqҞ[9e;~>␨Y߯/.1.Ju:h,$0źHt|H|U(\JBjp.p[$M)c.;_3I)I.lvBg:}.Qi/d~r=EMZZ毇Փ9ii}.t,Ԍ CC7&%W^ZBƗW1j'83IH quⶨkf׷+njcv!kTuaugu}E=o|]-s\E`c_MY"㯥" l粊EJ`YE,Kmt;SEU h`Bh|BՅb ҧR֣RVi#?71 |c)J!JWrɐT/bV6j,MesSխ1TٱZP&㐎"3II5I+F)VqR: Ҡ4w@' lтzf3/+dhy$OH(}r3TV% >mgDJ]%Td 43A6,eH`6۔:$ɼ8GY/t7B.Cq4 Җ1[+dB9@MZPa_xR^d#.ƒRiO/3XvWRb }- /A-\N?iqL x&̞.4aj%H—&B G+jc+a iZ`-W6xaybY)4MǞ`䕠< IhUz] ZG/xe.o|Ed+%|^/Wm|H5o[$Хis,rR#V2*}sdTQ=`c_9U;xe"͢L'oxtGԤi4&_w $J& a{8%l [j|YLsI LBJ4:B x=L,2[g޽гvwYt%d&A0֤LBA!E^. 4ZGXUyI@ZH%:oi7 yLs=Wth~0== 煠0aLuRq0&@jDz*i'2XX\xDI1 Pg |{JbڄcvU"h,RJZlOI:%VAdO0 v *[tESR[oC^s0ҰKz>BSSY>?Lկfhnv Srd.&HlL۹P'**I*Iurg2YB D#F}Y'M$,(:MJ>%JB,wV2bt&{sNz.Qg5qJkDO /b0ef40))L[ 0YdG ('Qa$WU$`NIäY)RR#$< XYQsDuPLsO^~T''~좒'.?g.SRL׷y Ơ(=| ҩOϿ]Vp8bv`1 !4RJV4󠓵Jq`bJFͥ!ק1U$Y\IXM.S97)j眔4ms2*laq-TmlYmVro˛;WV'[JÁ| \F{4~2MOa'#ԩW2drɳ750&2 +1V%1gGdBb66!hFh2v̺,H\p:c QK;̦qծ6:NںփsV95v/4VTZp& AR~5*7I(aE@2!CELlML H¢&H6 I/:sNƾXjTh*[D3X"'*A'2*@X##3f $@8):j bRdV 8)emD.v&ĀNĆI (z9KEJEV>uV%vq{Y>S w[ew"c@te#N$gf֓?Ua`ab졹`U}=< |w0|\,Ӈv<}ua懃~ԎY{8Szc\ë`iq4Y# xcnJE&#! R\Pqzm4/F7v>_Ou}~7%(+>/괐*ALsduij @#}\$eDlut*pN$ ťy omcLܿ^} F.4":-])hFboFߦZAկOݴaʟjyyaq}BџL^p5Pö3/uޔ^=+K_8BG 5%ү+y-_IZ#E)~_5cJ3oLo-pJ56bjM=[Nd]:/' zS¡!&u>ȬM2f)R=&2nxI+^Rq`'6d~~F|}_v=z= uP{!27s_]^~0{Z7ԺҺY==@6V}mԲx?=?g=m/pnj)GOnY3/==u Css֛x}\f{vB Rtk=a)eo>h[ո͍.cN/Yj@*ф%>:=6S^&s=kuAmN!- 2؈6skDP|q`,sQ{A1RZLt i!emg tIP+EV&n#Ȭ+9re:xl׳>R(N$9K J)vQ-!=2M_Kj{R0*1Qp;-& :gv})$uXbQК,""ƃa np92ژڔ1:+47[J#hGX\r:Y0pYjLtX NjBrJwRdc9"/0X-}mR^y BxuJttoӻ%*˕ 칩mLo +70BB^<g 8#gy,OSI)x–ksXv=$UaY%dyZLPt. B0AiZtp(`Pj{vpXMBVK5xQuO븳݌u&;P0&(2c*Z%@bp<$\$S֪dzAcŀlr?/qNl4itvp&;q꧟뇋&q{8e 8x'~8  ˈm ]k߭~Kk 7˷Ӹ9~ʸmSߜݭU~^ 3*~O70}{8޾fFZK$ںh}l:*Vy1txUw`f }#a!.|+nH=},"r)Gv-u+!o+?$u- {CI,%zؗr7ݞz ]U1=KѾ"Qʫ$C/B^0\E9nr6(̽¬~ۃ(ύDˑ egs>(y>;bw~sܐ'.'LO}3˙&h}7AOn4?+n_ (kl6I?9h̃-xˉrwbs3/PRjσY:kx7@:֕o|0߮+giIjx@Ges3b5LֹWGf*mSw!hg)QceX[:umi63gwU{h;\\`;"5$eΠjƹ >iC@Ŭ԰FTeѳ&4Uث g['|%cdx`fW `Fz1'hC M*#(˅ѫb|,fޗ'ƩdOŅU8{T?]Or_([Jx6fƱfXg" l;r~(vﶳv?NHjNmgHdDXSd&zPOVs΄Rx"46XC'Yͭ/zJQsF%RvgZK?v9.Orݾrg:{{NwʚŽ5ErTEDL)J`)LVB]Y iOa3v|?$n=w<:ټڤzHS#Ma6F$[ClpH%2PR! fU$!Wd d6dv+c4!d r ]!ڧEjB:05lBs_aRGT9l?H_؄{3-c/-_@KBx/WP~ITpЯpP`rd/y-~WPVkS[ܬg:zmXO~ՙ ZR_y"'!GkaNF˄ @6) мxp}RAcƐ Sx1 ]!\IS+DM P/ tut4%uONF]!ZKNWR: ҕԕ0_Ip^th)}+DX0ЕaT2]!`M+kt*th}+DI@W'HW MIBte:+Y*thm%!jυ@)zFFiz`7UG:UQWQ~#骾v^rolpW?{v6bV#/?DG?MCS%Z̵Tyn\͊P䊗43uL1xG!vYOױw,:Ÿjw9&aV+̫X8rǔJ\QVZVX[j+Nj /#'d< hҫ8EZt@520d߯U~GU>aaUhKԛɝ蘏"TK4FUHV Deo#i}VC?"4E;1l({Y[:zta>ɮ,_f'7盖벑=M\f_l6WEcܹ SeU*oyxf%WdSXx]bgn(*bXd2)hJݳψ{)F`<,5Jbw9'c jCYPBے OU⬂wT(s܇3!tku;@K$, A2|>_KS0>Q vߺsYFay̅'6/daIVx M,NlzF6~SoiQ퟾__5vD-Xx?i?ħ[q6qp[FѭD#CҁV7ubU{I`|i6Bz7#HcpSI벚\: l̽9LEf=lggw<[dZ}ޜ]o\m%7tyEU>$;TQ|֫+[o?de#1ʭByryۚk֗Y^Yй~>ԀxqpKev'B&1>C~lS!R2h@e\3P|Va!t{`yHl |Jk  P,`4*{X2ip #]xo#͓(jVR2k|9Sf%Nc0s(j^UetcH}9lSāSi<-2Tp[ {/Xit2"Ce&e 1`*|h{/E|+/3AG& #gH\:Tl8¬!K=44nv$V 5K\z[M`-I24pMmceV 4L4ah8e3CcSLi93*&y9w68 gBD9\AF3՝aCwfuaVs}H5bb$xOMV[;f|mlۤLVjp#Zk>Kar)H %M'G4BNDi@W'HWXt"\BJKBJ0KH]!\LDx Q2 ҕL)]`};e$COut(JdJA9KЩ~M@)Ƞ>-ڊU'F[]u+ٌ#J3tk^ 'DWXCWרT *BNWJ; S `N>؁ JwBJtut)xʈqmfv"I#NJn C@toMݱncwUu.!%IGc \SVk D1NPceM0=RnpU2S"De Q> ҕԔ]`QWiB}K4ճЕܚ)-3l{(B*cؾ tut&$DWXd RR+Dz?Dœ]]mNI]`m2tpN}C)ҕ5LȔ [ ]\B QA]}>tZv=#L`K9/A7O.FW]PJB{jg tkScP D$CWT rwB tut@$EWؤMt(٠N8弱GMȜpjGBrR0.I2ۛkQX̰%0:R=_jm;6<  Z:ئ Ѓ6{AK37&; >Rog:zy! 
AE)1*x^h]HSȀN6[[XH0 KB҄XR̜{a7}N( '8V* K ]!\%R+D{\wP62 tu:t%a$DWXd d!-'t(@WHWPBXBtN&ߋ}[&ၮ>Kc&7LBx:^\Ɛ]NVwn~ϯfaq~[7u~r4Jk~-x(!4w\pr0/eP.Ȓ}gdt)WUXH4?Nf򿾼Xĕ['튞Z8 HM\_hf8[:,GhU"5c =+Ҍ])BBAM(Մ`E, ' vh݁3E±nte q}[y}Vc-Cy&~?^~ t|\C JFl[m+-J)xݟ/g]MI\D{ Y:>L^Of ;\.8\j~7%9,TLjVu};dHbUsMw= lGoɪ~y BA968]>FŴ*")Vh*+,'&&iec& xa6 o6~>b ́O"!;ťc 68AfR()CI=4gA#4j{d8h/uQ5lOMd?6vS|mKաEM+ 3dmJZ}խ)H|Ie&ow즭?ڔsW":bS[>1:=#˶6E*B5BK+X \J*L42%JA胎 ϥaRkb^0֋g ULZUmi, c`p+[$HF '>R, y!?8SZ2J-yI/ 1\$2"Z|M*\ |mF#qRgyJu,XaVS!R@ 3tC0+akx'GPP<*1hxiIUܔ9G/CIWTjF:X}}'5F4:%bc=cg|P~k rX~jو*327RYʾt3ɽ,<ꢮϭ$?_md_?1)©Ԟl|Ery7},.g߲uf8]G@0/." \.(ٙC/+JXrhp\~l.H>Hb/2z 5[E~읛UMˬr{\ S/9!&g cw0 =;pm2v _cS_qrBo|?q~~ۮAVKOSKn϶~(U}ݹtCev=^n+s~m)Nl _/lzOV',ǨH~w  iǥ"Kl$gwEԂ|amPQʂyVMSUu[+X0 Urom6Nڻ26G$T^l2C8͖p>(n \{=it~.C8mgCo.#;1Mٸ2_É#~xz~k?5>:5"8 'Z5 ]{4fpņmbn#6guGmSF+!U:H#I*eXcCK]V{,4硩VUJAgDϥNj QpF^ B1]T]G0=uV WSIKV=ڛ³>?Ga&o[5[SxRHRRذt0) :nB)+x.lpQiyJRHsߡs;{96k]z@$Bx#--BnZRVJZ0 is\{ ¢|=~5$[V^?\SiaV1Ecѫ`|V&l'{h [r<*2%U R*(ô,"BT\B|<㱎7q5R= mٻ޶ndW|iű-@?tmv/i~ 0VkK%'u.#ɑmveQ/9> X*+^AvB%*&ĿI%gB4&jk"*"qP!};DX@rP.ָ"E]U)+' #rLؤ$ 2c`H i۲>"A~K^E`.I#g*@ؐ0ͥ,Wv֛v7Fx ~3K#>NQ<R%*WWD^|4}G8S3bO2,bHכ1NCo5?On.kO}B^onO]#ukVß@8&uq @kkI ft/fsv'tOgf b D6ƚKq8Z&K~U&EArt~LybtzG%oW[>DOON? NΏg^wqoWJ.tb뀸bxη|[v5t{C lorcʑrO~@d—0 E IՕ8ud%1QQTȦAɁ(=ʻt5GYSjyN3[x,?TVJzW- ]&7om c`5Eg YJ]L.i (s B=f/(d @HB)$_BRhM*k_ C!剠02V?%pk#,tI0c9uLL`?DS64Ĺ.>AN L֟kG`.N4Nvq=6Mźƶv{,EÀW?^Հn Nho-R(.%9Ymb(RDbJ"S* >O3e~GeidmM=s^lh…ZWaII+i]TEm)$@bz/Ҋw 9}r6Nn{:]6͇ɱ!ZG r>"las6^ $Hqm]eTݘϡ-o?v]Ȗt`l[(US񖁻~$jМ2R.VICb1tCc6.Pr%8ST IoҲO[G[Ј}vm.(C "F&*E)fkpFbӋn@.qEl'?5QP& r%GJq2 4F *Zi&}"Ke\6튼Ou,IL2q-N-t$IWʚ%d:eVTX1#$ɘ,rYjc,X}E ۙ+.8[ 'c"`™@*="DV .%g\S; P#%V"Q-| q/YA2 ,+:c")^5f/zX[/M@JIF!$"5i6bȩ85=w S`-MKG}??*O> 0d@g6_f\+@T7DU * PJ' k LJq< o閷|Y{ Q$ M>a뤿lQ=yq1I?MO.7OFo{Ǿi6{<6l~vB,e~8åy5>דg.!OQ4ɛNrW: "+A$$7( }}CH2CcMRR)-裉Ҁ1 EE1b.(&R)20AX SD$$wΥ"c%]hKs7.)2wGKv3v x>؇y%AIpp 9{/}:IܹZd:p~/f<-z#]X`a\u2ɪtu=}D޾R1.j'I1\ݵԼ*./O{&Co2=?f›x~Mõ"u8!yW`Zt4QntᓍdYcU!]}",D-rN}^}kѓ]Q-TyӾjK=YZZlL2J_ rGꁊѯ<|7+(}5NNov󳋑0oFQ =]95kjll--VC[ٽ̌yy%ձ<-QUZeooZM4:F!:о"'eCZ=MkrRKcNKA"(Xf}fR^+pM(0{'FζYW=67hȋj>U1ï=he'T]a*J* }+rZTC/ȩRwBWS.r;-gֽ,6 JmPCPjFJ ${NQ;$Y B:aN6 x_E1drҘsJw)gdSlѵNxQXŋ^ r%" ^GSf3ݭ.{f7.ӡz Pz4i `fRHF粌SvJƤrΉJL[o0l')][Zl6F6+fP\jlĹAg>hq< g{uܸxx8nst:Jkn?:9|qu'U1T{K?/M!)om&0)AӱӆGǷj@]Roƹ$%#8%$JGeW KlTkL=c;6ӌ]}n ?/m~^>#YZ<p|<}{<~:$1qS R<H(@ N(@&^S[۝Y20TTMel2 f+}8_$ޔDuwf[sǎI^ⵛiǮ^4fMﴗŰi ;6RJ^&ۄƯ1&tb"\Ɔ @fIe_ $]2 G!3$Kuq3q=_%Zq_b_agAPN}=܃ [DI~j܉}#0*8{ !XPچ^g.WEM*qkN.2ՂQׄ3{:QqoӞQr^NY{@_M'ADJι+YR+|6`5H쇇f6ηsآQqs6P\* ځ}; g-vP$:hMS шfGwq.11]OZ#U!2kEmط D:q!>-TӒEYG '+xTWK ^{}<:oF-p4y <#wwn,rf۳}'-{t Ls1fݶP,Mu _d;Cf"' {ځωоt@[ʊci]Qm LQqe/pFOdu9"2705㈍\ 8͙Zn X XÕ ~ذןx59g͉'% 7ӁN:::F9qzQu5G:[ǒV+wז!@hAh)Fk"pu!ރffř@L PU s,P뻮ҙ8; 5ۗ'"vX Vl}=]\`C[gt]i6/sOs ΢OXH60x )2|b1J O7g<}i5=yiGQ%)BkBt>@;= &G̣b`y:<[3l9&IJP]Td DDO,.eH5M^s*9h}gZ>(}Bt2&=3]yBv0YFM 7N s w:Lq+ƉYbR1Ɠ!(Dne4 fC >dXZKS !"hJD=zgh p<1Eʬg\3.AN)? ~|q3}h5ŋ0]sg?=%G#q?f\b1xI{(ͳ˨YUuuP+B]gnV._BMgTGOtX !xQG$]˝*-gL˩qڌyr$#02%:8)֬xϿ!@uOgO@Kzz&h<9}}~6 _~ouWojK[isҷ8?7s7v-_fO##њEsb<*U^ZP}qͿSsy)B*S: B,30Ջz`=B'=[^|~81r|_^ږQ3z|GǏQ7 ,uV=Md GIUc5ƮN ^U?߼{yNJ*x1k|={ǜu5'|?~q҂ޯs48)R8C$d.8Kz4UԊ8 :ݫol*T2L{ᤓ_MU4޶/yZb GbSTEǜf*gP鄇hU6iPΉbKU50̍3F}1~ ^:k_iH*}@|h$Hbb둎Mω;}e\'u m͌ AŦ 9-Pe壼^{b10 kH]BtRRngш0ħ<&>qѣd$)NddáʄP=|Tpx d$NL&N7h'A5CW: ,-V;mO7e'd"h1RxgQC(L g-b S8&rz);(o7۠4OS9 -t,M{6'[ôVPJH|;7o@k#gF,ʯo[Ou]x84<ר J VDU/W]e^װf-{CD.>jn's6>DF)))KF:LDOm@'r(n)ѭ{CphgRi6r:᭶&H]Ҟ-U1OEJخ㟺gY/@',mr-=[w’ާ< ~YȳS+Sl:|z]}ok#oT.$iM(.dJ"!+& \=. ){B^) X"i3'Y4)*BDa&$cPTFnb$FTC,ލj@G*!:kqAhg $Oف밶}asr gz͎cESO c}Rr]XƮz_d x@xUQYxN;1 nBtǿ`#"%Ut%&yOCL!k_@/@tdMqvNK! 
ՖB:)8X0*HI{!/z/dU{!g~2|kʝj5υ^fmȨA$)!Ri5 \U8L4>! j2pe*v_jw=5ct?EY[\E?;(J8c.R޵Ƒ28/N;i gs6@% 'AHьsŚXdKj*~U,~y"c E {;A{d@HU= ԩnPO³m){ň$ڒQ9 ~xrhaiG?ҿr/_DwV+ '2YM, YEhf^wua9璘2Y+;$3 ;ity.>hJfA"+Ye8m1/%[9Ҍ/?K ؊5^(%xd\MxWD(/"#YȚ Y=0GY_DrTє9-d|Ef쒳"m+ms!$W.ؗ=rICe&=I+g*Br޺:朮6nϐU?Sn6iڀXKL4/C-w`cbrzlSsbK2}YRQUYD53s_\WSarwMO~7L׼o)yp*ŋA$M?lOhյE͒U0$R$BrPt'~ W)٢N_ntz3I貏Խ@&g7$OV̮՟:iͱܫ[ JDln}k/!t Jݻɍ{Z,0#ڇSᇋo;&dNdz2=>[@E_Qmf[F[GN ##a [VLFO䧳_y|rusRoUQ7mm%ͨAk?cbLjD8^gvvN-ߗϖ'x|v_oӏ߷?_~?} ߼V63l}C jho=wS2Lw(M~Y|ɨVwC&*i=gyᙳ؟, <'b-J x>|[qf'$&3xm)u!;}KO3G&.Z$J>JF\61&%Xa6(=wam]^}~b'{[yN B T2/./";`xnB;M6v洸8{nf2q#{5vlp(В6nzC|W9L/$: ٢]sw3s );5IU#."$7W^uIZ{?V<ɤtE(2Pa2ڰSH6I *-rrZL1-8b,$c ^C Ř;-uY$L7r| Qn[!~V[yݜ%w([IGz/Ep>/}ZJ_M:] _K:ۧ.uڎ%U>I͗t^5>.Z1 ٠:tC ]Mo=z(/t*{,0ǂ7rPd4(O#M \&QNk-twBkN|9xx9eYhn#~!XcO.|.%ӎsTBe꼆z.)}aYq?P8!*cL .K`3'5(%˞9tMQ#DԚ>yV-TKAhMB9R)XR *K6Fͦg~N=$ƅ ~ ?>* rr `<MdtG"{2  fd+vDSE%Ӑ% ╭TL%N$N*v[={j=8XQM~E~_a)t<٧$&]G%-\ i8'ko2fzޛe\g.Who2ײ*'%8rJESJ(X$K&hM h^)Rf4ֻsI %$Jr 灲/EGG Tkl:[vX-l&-4-|R[QEE[&{?qUrlrzb b0Of(&G E$h5R͎v1Tg/f *%d5::Y+I+ہEtu:zttCͤV6v#%@&bYڣDe0H9ZТ=ŋWኩ#Cfd(4{Ѩȱh%I#Uf)Y/EM~{;kjbC-klhGHĠqHڰǦ%NepQ c$0NʁGܶ綜fߘV"NRL &KVbCuFfӹ"^vqޭkHfRP]΂ITݢ8Qir:&G(L(#0|dJ3j2]| 8{L:dkp'0a[~/_#b6STeEGܘBُڃͳYWsUܪtKD@$;m-uɥEB_Zj,ݲ[mHN`@NQ7We>SvP򔌡UHddKd bD]%A 2{Vے'($))$p褣uJ0:l:{$:-0M6!9֓0 mՔ@9NRe._V!"(d\"Yc\jrE0S&h4Z 4jn3]Pc=EԊHɄ YH+cu})("b<* oJd]W0`A:1H^ǃ\U 4≜6Tc=k6=lxq;/WRP$$Z!Q>5!Ō{eTE5JßSjT*[Ng)H"OC  /@qD"UAjU*PtF?P@ԨIT KuMT6d]˚)ox3Fx÷CxeZB^X1F8 Q[DH^̋TVU1P6G)]DV+Nc/z0gOA[۔C=R!ZZJ&H,"5_&}S%g]q1j06$!kbnO&]ߤ[|8z QP c %4٤Ú_m(ŐA9Ҥ.Zй\RFXT&{R&*9 &*)Tч $)kRRJ]1Y{ɉ8\#L:]綧P"k7鐂Wތ:yـ ؗ5%YWR`ٰ2Aa)𒄶:\Ia-|Ayk/BegѠ˩qͦ >4QY,Җ}eW6 0YR( '= '/׽ZtƧGeY)9- xW*t`+f{eIXFpVQe^L1K`fiC45xѱ uNݷm}#ep|s/st x8G|WkK!-_3$V\ l70wM$&W5b{hRwx& nWK^2`2i~Ƚ^rqԹx[zW3zUƶn9{tzml[{ ^[he.#z!ɐֺOk69\hECZXwZ13rJm=m]s,i;k_Z@I5JYYYkKRk1d.e%j1$Wʶ'9//fgeW]cΩOY@hw׵~~o,muǭetS;f!m|nw=zj|JLiGZN}d~2U7s=H''2o,hotl]{Wmop147E {no2J<S5m~ſ׿3lᩫ\{P]"zzpzDZҨ6c"_3hJ٦$YX&w1kL`_\tZm$nb1U1G-6ESQ§Lֺ?LR`|BDcɿ2ЗRa';q6mC,}J)'9xH"iS%r55 ]2QY,XFqZc4H)I\/}t_*}tlu7fо6?1:&XciP%ZΒtLRƭ~]0A^[}Waޤ?˛.&NO|=\ Z2rn@9Iq^Ljz:_'w3_޼'bLSe7ޚIE<{Qd5P:B.\tCşrFb˓o V}}-{Sd+X|64^zvebl:َ.fP>|γJ RV>d^h`SWIw,_jOAKRq=->~y 0$)9]2]6h?Q`Zfmة-JTݲrDEx0%&(fw/r1i~k;32/ <{;K^ׂLy -$H+HqfO(9]ߙ%IfrOw:s3vDz6e!6df-=SH;bYɫsj/b^|ߚ:іaJÿgbF]mr9 Gly O%-mn2 "tV6}J5*ФkJbPģqY#6?ф]`YnS\%Jc~3 }:_ 5 s%=!P# 1&!\HL*IL`iČ>1! 5thwxv;ߒYw2 W^ Z mP3E@ Kd"R\d/3 !?'=jG@Z…( m09'{(|t1fBx-&y:-Ϫd7˯* %Z\>h];f63 {Ya S,uf `3s2=*JGޟ/R//=!m\DIU&1n @΄`s-fhY< y!V;6*}Pr}]%+Wc/*A+w?] r+A@hoUW @+kWv=+%׳/ӲO(%>J-WطN|:/s^Rn 96bsB;>X`afjeLE//q(B{cHВ?LP2vPGJh@lzMa@ h 8-i1l#4۬Hn $j3c۪߷,OfkGB2ENz6Do*(.6z@)lBs Z P* p* XEd8"\B6xјd&KA-(צ1Z^uFDcUcG\ckJyY Pvap)A{ Z~')7fuV>t>{3 iCFU<Y5=WC1ΞPRAe4 י>{CaSb+t@8l'g)Oc.*5X@ '_RqNWUsqI ݆O g7W$oq- 9x[bm>7܊ eno?;nÙ攳bj%bt!^QzPHBa5 XǨ :W]j-mʸn]70-nYn5XXLjAtoCEծ>Y&&VQ'!:cF@1gz4‚TFг6Գj1и'P+^W89^ՀWNۼ?zOI-ގ3lJ'_rQ[aL,m1MK FiΣ{"9ۮskU9nt!+8||.9BUNq)4+Z##&cK"Lj N;N[TsnQS'3#|uw^7OW关yl 0)>n[gs;g(T뽙scٕM'z pkiO/麮R ֵ#[ DzG YL~&z4s78j{%^AZ7VFWO{3PH9&#_1K|~qhpFQ8h+ng3t׻_^<{e߽xٿ.޾zR_0iXm ;]|fMt U]Snw19&G(Jϳ7Ψ!f&E`IJHb_=zZ•MNjr-J4Xa!nr!jvIws@XO {S=ƽ{M|7JFe(d>gQoq4B !YF)ZԸQr(}mC]Z96yٚ;8fDY:DkE^ FDG9L;x2}kklh ' heSCZĶPc|1AOC0id֧MрmK!Z ĴS)h^xqӵܯhd`l=D"S A:@U¹Y]Xgz㸑_9e؇COX,ڊeI;#Qog4HԶ;A7{U<:UNU5WHM@ઊ.F5 t*!X+ Vu+F2aR7sI|棏7# 91{\J]o !mp^Wl7^BUHP*C!\H5kZ9%m5p g5)ofڿ_nr :tYq^)̽eMX)t7< {0]5a·!E.m!‘2r,>nm7;H-|*[dVc 'SkSS,9f1i,BUTPs_%pČsڧFN0buL:cZz/sqWӺ+a{oZ;әvpY+ffBRtwf9aԷ< 1Og1VB:dte(У %;S'~MB QH? 
Y%MmpT6F;Ƕwg#PԳM̪ ^?Ltlⴠ%ec]%+YG3I@4okg͜&.O rAm Svı{^D/7]Cv cx20ce3«&\^ݱn89D-Afw$eW+0iJ!f"]KsדJ)YO >= }-lh2:/]y#-#T"g%fe B VV?|k![/~Zq=DƐݢm:Xxӈǹ/5=r!2[ u kQXhc9cVVT՜'!n6evS|9m={}V!{&MuazW9HQ[fBUr[.9_e a xY؍{{k!9#yVQȡ ~s,ng嫳%?\|鯏֫BqV%-|*~1Zگ, %g}Ŝ-ZS½6MrraY"cfC7MrmEro뇼},]Hwɹ,Ȁ8ƆM|_oևƫ_Ƌڜyuqv־kNϖGu}%?O\T { i{]쁎Z erkj+:hrO0>CRr;S%npYC2Ŋ7Y) NL_y( !c.+9ŦVKFzٴU^Uj CT!h Wl-1'ZϐSAQnM[ ‹"$AݴY*I_AmPsTeZw.9*Q9V*K6+r@cϞs/&hݴŪیr UeL&VHD^22&U"'NHV:] Um)$mYcedO&rIٶX*d;Y7sgӓB_Ɉ[+QFc*I70`Xh-Mu.>~-_ a$SM)GA:2*muYd A-:=y̓iNUTB|͚VT-E5S*` >iJnj Ve(1BNs8&@ڭO|% !e|aڨv1JXuN+/1j+hբFwgDĶ]WE|ݺ2Rnvʖiu1d֪Uщ ^*}sZ T8Fc :>9wt!;8 zj+~>8066MUH̹97r㼃L{+mwprLWsG]4%?AC,IeHs%#t~&?,;4a\{.0Yʎ8aSx]rM&cTYy+@XlB.Ju5TA (Y#Us&:n܍G)?켥[o|XI<a%Wⴤk~>+:L73y!QKja۠ȸEM}IviHD|sucd* Vr>A.%*RՠsFt1aBa3 %BW"`#V*[l<7ނ'uϡ*^U1CJ١_oF=x8.u[ ]Ii-."(;@yVbl/?>jtUrH\Tr$etc,&kሎJ  P"Q ڋDjpۧ8I0VhkCP:;j7sv9N ,py>ԏn䁩QIԏo[stJZhn_>y:pcv`u>h[L."gCb#ad[2˫Wƹ4٭<4.vvqs\U[:.҂wRAJm#,*6kUDSq)Tax^E BV{?c佴q& >TG{fczoǗ~L5VLd;Ѫ!x*vRB1nWSK4J4fĆKr.%sh9iz/ Rʘ:֬j;1|&q!hdjl cĘr[v)xKHJo͜{+n锄Ws#iMB8R/?:oy<M\9I&*SlT_5yD(DagKJ-!&L6 e9VRJ,1Przgd|fg_SLŠ.'&$8GAx@SB煷Y6"T_8FN3,xCQ'RxHC ,Zdm-,jVi3> |^Nf:Dy|Ykayb&gd#>1y~OߊCHU <&?$,uh[c-Ghq؇1n6hܷN-ΎQf^-ucG^#^ _GS}E=ָKuΈV؇&ĤR[9OS*^b  (e`y'څpv+@M(`;z;^|޾3/b;>ZJ6T~_,^-yr!׫_=zthhxw{}ozX+ll׽ '.ZN }xru ₗڵvFZ/L .fEsoOk1@ܐ9 iH`J3ǠLJ˗YwYʴ*VHTնqQ1Aor&J9Zmѓ6?NgvR[8̙#i܁j6[Bg].>]sp #+A-ʊeW-aI|8]1g_gC{j 㤍h,CZ:0C7Mm׬[m廋99ynB16lzx>4^zڜyĽuqvNԕkNϖGu}%?OIՃgMv!mn_Ho\-,?J\^.6/{}=ǣ[:е8D8:7B #OMrEJno}#!"UHpKʌ.֞[=á6X<;`(v\Jn$r 0*,5F!X" *Sa;yLa&dΟ3ɲ1n_uw>h "76PsUZ Ȁ.&Yus(]?қMT:/=,*9WixԼ(JXiQRfs^߀7ߑ8=U׎f]-{Έ^J- k  s&e8"DlQzD8"\-z`x`*m֌zS*#\rϥ43NcpAJI`{7bx-0`"{&Cv|bh }g%0l\o;#5Gߜ7ҔMwW-4T %ueN3xPڸ X[@/`^Qz,U#Ʒr%_pT~nrh0 3ՁI^![VKwNy `r3ػ*ުI>k/ņjx^Z;V=-ILCi]k[n9х2,Z~&|-66wr'f09"Q( ˜[b.Y$ȝIn{JNnGf*<#Վ,5:6{si)ҷ)7EbpŝGǸ:]̧aVh "Rgkc K]dX;ZR|c '(ksj QE57V"FMsFAXė󑯠+b 0MM=ܿ:rǓIr2y"+~.a$H]m\Ƈ<_3Ogo%"wz5ycr'>ց`P.9ҐK-i!?StٟeU(:G^4:v4gD\+@לkX)%+nJJE@8H5:x "*r̍g&Z\v@:Rb&y$ ViǬQFYJy>%&zqyqli6G# mS^nkKxdr8\`pVmvUWӫym">fBm4#``'jXXaDU *ƨrRg棗JMBBX1J9Q7HgApYia,Y.= "VG$*`ppVU#,lAz(7q"Na%DiF Zh5,D3ƹ3XK9yu2- {u@Et};8Vx~Of(XmP%t+ܮ͵1[>U1uV{1ZYH ^U˷(AN!(S vY4aR Y@SaknXewfh[r0פޟ߯’m}6dq:|*KGe mgY5 r#i˹y?8D |/"?psFnضrج5J蝇mi=Eep|Y^&l81j`eq"qBWaMb?= RSpPrF9mp8WB&4RkrO+˜RY삵"0J!CL'gIq7Q > ==ۏڤ AlC32Q NsDr VN#(ؐi(QT ҭ`<>,b HGf UO)|{bZAHDoϱ3nq+8_v<`سO R–M-Ʋ0/< E_<6~@ď^Lzfw~QLޘUr|cx-cIf\WaknjNz/&.mxcT1} |r7,;- Xo+cM .Xft9sއ1)MJSI#v ͽh^(!8N1`X Rc$ĔPAD'qAp%0 %2HRVa S띱G0餔豉hj@Ғ-n:#M Nﯓ¤hu{"2=ִd+[T%'ogHҧ3%1n_x2+r ҭy])ăoxg@s)t8n>q4dI:n5/ wyp3o~="ն5?vs ǭ-WQ~K~]`sBɺR;~:)3N8ReɭW =Z'+H\ N#=KO&}cHOT {?Ct)!tP>^B+bb>¨ f==1^꣊zarђȔw*b%4˵e!f^!GB΅[?߃Q}brvNZJǤQiT1gU` q2mV2L /gm<=\׎f]-;3;qKSX KZab"F'Bk眱xqKfPNHf="Z0`6ZkF)RD:<(8)%Uq@`e%T8 )qB.J8lNJ7b΂wDðr9- A=G!M8ywj|[_[Pb_3Evj`CFw )?EOXYꢡ+M5 D(BԎx b6zdm/'P/ N:F`C,J)RYH"e3F(gzZ((aZ p49Hz[<ydIY>XZ$eDgG ȸi$vݸ~W$81xX E!tD`N&>38(>d8:άXַ{mo@e֢g?`3tPwbږzvkl_j']TTf8CہߝokOC^FMOItc\و # $zGASD@7R׌'5aee2t~˯wa{.x EI셾ϪS螛1,;C8*u7[*gocxC".2y`1au^(tL2W";N{>{,!w2;A9II^NG zz1*Mt'#8܏?i#~(wvӣ˻w%q>V=9,-?pYd~SOi27 B4*<3|fuûDs ko[l;uҴE]Ӳ* D5-+Geex0%&(C, I#>G NpwE2.˛ S VoHHQ$3ާ-9[߹q8IjkYpb_?n+ʶRXQD!^8 OtrM[85aZO_GBQ?C? 
fH: 7 0X&4fiЯ9Bi/ g* 4 /`59 dp*z;@U0oЬZp̦fec.8zGh<[أK?XVckĥֳ/Ls hu-lyy3-:m-eRޝ_?8Doy#}1:IlM~L `!Ɏ:Y\qEMdOA\ȝj୸,U"mϥjŔ޿4{N-L~kejOky^m=/WotЛ S 3ٖA^!.n{#7ŨbVx[O&h-/5"^m}+=.I}yMOw;jt 6˂M/ 7`"n>x7ɋ;a4a@iLX'ܚ s"1GLr۳ovgF3RmRkw:3d)ҷ)7Eb ;i q K;|&p/276d_F&=zуJEkvKHrضkSU&|[ϡ(aB0k\ά)7CvVmm+~ŏ0f70/K5]f-\b,:Ln8_NߵVR& 1A}ẠjnDh猂c3ʷWSG|m_s3; zC{av$(n]bW/]c JA%3|%gS4axs]=S2f}vKǛ\\ߪOdZv1pR| BvY4$ oj$ysQ b@.es=L:gJګY\%ri݆?LggaM]: /ttliW/`x9h0M pe~c/-صنZ`ʅj`VK7jG,T@:~)X(/dθOGqOc`bҹVʼnuXԁ6xƍxV,\(wp)Bx*ƌƁZ1b"i)/gݓjq՜ZLzCI7)?;mT/ WM" `vY~lߘ2PϖLbGx1&L{+$l-qvd:tn#*Cؾmwa:#Y:jZܾ"58b9+DP鴳JX:g4[Ӈ55 Gò,Lйу( {6SNxTAIw(oFcTxGDJNN0'Tf܁8 !H&H ]0HaSkd]gîPO5LPQ)DH!T猱BXge4$adx."l=*˔BEl%i"u"JG w{]'Dj6 IXii) )ZND"Rf=5J%qxo`@1٬lhgχ_3MCz#0Ũ N{}_$tQIp&Y!F(ICt?ָ,Sݤ{'jk KW1*WW䨸r>c83 A#L\Ω!1q8ϝy'u^@0YiC(X.2\jp(0!=wR\N0a0)cq#UjBzLWqBTkQm5B6_t ! xA\Vf9lQN];)G#! "NznlbϭuKA:%95q wUl{Q> Z'qth-(YE4  p9L:UĆ+o~۝on>2[u0ٍ\[^ZѿmDрmP9 < ߂`}j&ː#t#)  lU¹Y]X3F1`ՈG`b*QGn47Op _ئRsW3$l'yl+U3L'geYM㇓-'H29pRLV*f,lXƒƃ)bIj$JL)q4¡"h@QAsglrD`|J Utȴ-pn|7ǣpfu됑ܒ1fLQH7rWB$H?LNd,f/%a ^ n8i§le^lٓҧ=5y 2wSуaGdx: W?~{y?h's/(b0( dBgAL~tKJ"cUh)h#ԭЪ,ܶ4+'y6-OkoG㰠 )F/YnUGg^6+Ð3h0.o:^-+I" ^5@t^N1U,>\V-,Vw'02ڴ KIvV!9ѹTJCqY.V;ĤU=V2蜳&۠cA ÒVҺ:RHV S_چ[Lݞӷ7S `uwZ=ՇZc{[C#R@ZVLǡr>"ll`I* mwWk*x׶ޘf eS4ѲwGM+QהmCz=fC-CD }ؕBp!>IXQ.@ɕL%o!i٥ء7 )C02Q(jM1+_ג%6( Ptɶ1|ZDA)P7N+8R|$= `1b0V 俩u`3qƟՇËlB˸,xn:zKgå>P~AQHgY@Ax-@ )R5)s!iLd< r]`Y\5ElȣdDR ! u.)V>YRk4[DU1HǽtdUwQ` \qDIA𪱝5v{ThaK(*@JIF!RًJr*)re<"=EI &)pOn~.cea!: 2 䚮$QUHUq60@fP+M8i>"URv5 *DʐJL>T<`/ceKP/mFrGDZS t a }i 9] GbEk 4DAK̋TVUS4&)sa}rW`~ѳ]1f*O#0_! (JS5.ze*'e(@ܔC 0 F cƺt|r+LC:ט=Ձ5~Oo_tKVHReAPl4&^(mWv㓳Z5„N;LQ2luM'w v1D¿IK#ۭ'^ovo| #VҖJQ'@Qf>( PhQ$/mV^1J鐔uz_P $!(s. \3qsz]ZѺ%H>x7ckn}t6'2ƤnPsQ &UN;Dw^_u߁wG |c@Q"P6pZR(X>fR^+p@MP {%Yf۬4bo߇^: AL=>? yz¤Kq7u=M' P*kTT.*.J6ϮY!Xw{D!S B:aN6 D(!Ñ$朵R]Y:uZ@dtݼE'ΐ.Ɇ :s?%euǶ6vG$yMteFJc`+hŅBA1H`=_>NF%|1VlQgb!2[HZbݠW`1a}ZsG8"~Ց[.:iɶ~ l8[Y>N$_'9L˘B Bp㩺V~1fڱ?ϡ?|Luc佸qv~|Ke.QD &Wo8ވf=U](2t(bZBDRODw/v{iV鏻4E1o7iMI)$L:R83чXG(K-/u86s@O[|&XEJ&rHܝ-3G,h sI6K'Ǔq\s_0y̽[Y-MsD9U8BUVJ+oGbLөvR[+#Jy/QE,ղ -cĘ%{ȭfܳcC-c)^*uvRVZ:'DQE81 y ZX[Nu$`Nں;- KQF'mU>C'-!/+[1Qi@%v+F7a4%(W0A\H&j/,ʖJf*R`c;dc־wnB[5-xK2hH>Q:pWT+:;y01Ԝfgv:z_}G^ _<%/G^]1C?л /Ou)=L\Oؿ?}ä5qϏ=LtvKf@d{;k2/ϖ={>$>"Rg@_H|& ]\z=<뼓Wgs+<7a ow\G?~i78S?l`gGit6wϹ7>gO4\%#at5K7bm]h=9׳o~4f<Si<(BWZkDL؝LJ9 )ƮH*ȝ Ġ*pyo,, '.]ώFD|s]ifo6/oDrWB"@0fQT$?}6 Sq&{$Ctqc<)T}r0D({SijN÷Ft|>iT9̹MGg|]3U#{os'~aXߧo e(U4q]nqx> r+~.[za^;@0bҸe]j-;U׆9}!|i)A*v -bf=꜎%a~k`*].bZqXYY݀ LQ>_t>ȠQR:堭1 c*3fb.Eh,*; V;JpqFWoFSǤky552~ޫ bݵݶ1P%zzǩ堤0˒U(|c!c%<͡s,V׺'ݮJG;t[UTX骲/^"ֆR{YQN!ɧL vՋ+o:yLJ^˵;stPV(ATw5HV&2lM&U:OT.9>Fjer֐V+FsI,ΣNZ((]- J>pŎtHSϑhE֓ozg UV;([JT0dOR*cǙǚlIݵZ"f6r]-"HY9ic2&%]_4RcCچTG[6OEK9_/KGb ]r^[~-)7 9 sU~o%/$1IoFӚC:_NTؓ,r?1j ?lϳS 3}Υ^?I<fsj @Mʀ Ƚ;uIJ{a6wqr7;c'n&[[ P@֬|!P T)u]*fqX7_rA?a<=C.G'w$^}~'z|Nj!du7MbX=)NJ(wzpzAftWOSx:?=J#V?߽wo/~y7<{R$C GMߴM+޵Ƒ#_)S709%x30ؙ}}}1Ҫ\Ҩ$A뢋RIeJJiv!If2O?ijYu(k\͗ 9[*@[|EK7k3^"5n~a;_hs/$nSn$D]mPьrB#a'M(;q ,g]G4-m".\=[.g*"f{a .z>qm 7+KCIW&%Ѫ*.+b_heRb Ru,hW ΂ j|sWvoӪ)teBwV3*O}룼py_h_u`CT> `@%ȯO_}M::A_-럠WV'D8\*ŦvBUNU!5U+ޥ&f*lT1I' _gwhC(,csEH3MV4I0d\/p׼Iȉad0 jl>2_~JK{4*h sEPH!s#o 2:9eفѶ?x}{N6YM@NxȞ :meػG~;774{##6q5bxkm v> HJR,eOld)v gF'nG$n;m: yFE)* ]$)-m5T`BtG& f4nJ`U(꘹`[.oLhb&\j4E{= U3ISƞH꠺'̜.G59kB&j-Z9[]\YAk!Cvku`u5Oٟ纷!!D2A.13ژ2@m{*c5U5 b]mvcKE{1ߌC[O~?!=Fj?zpQo,+a~仾= *lzl1/Mqe >N+{D6A7e<Lai6q/s$v 9<`X1&R1i#2R5R5i  U̐1s@*Xs%Mm/eVyW'n:kCЅcKVڒr9)Aj;z[u CǤb%2]qB%RPc fS)&N FWXsKG-Ŵ]*fE{66rYvvLl؜,Vfc*˨g2D5k1 Pf%YtrE|jgj 'yJ.-xBReQ5XvnyO ~=UNFG1m?lҽ 3O{}mq:O5?-Iu>XuqԼQ[Ov !jˡp$; T7WJXc%HU9łAOUbab]X p Dbn¶֖[pyiڗ ҭnb Z#.@.;j."(;@yVbhv`{* #JңJ2#J2`:Ae0kMJr4!t0 ?uŋ' 
3iZ6k zO6[Ӯ-4vhϔnܿw\B^t6=t@g{O!d=vӌmCƦ1\ǣn:k՟?LL/B>(7z0d4NTGG tz (LM_A2t[AЃ&IRr-xn\};z9?OƵo<{?vI&ՕyW.Wp_~IՍbxdBktW1U+?%TT(ȮW%_aHvQF9q?r1xk;t~z,gm|v ӹGٳSp;i&ގA ߎ\-*s,B^^c 7e]vg@Nͳ7oK7pݸrVfz˫ \LdbKo؝2̯\`ޕ 31yrه}ni0 (B*Ji&<#T 'e谝VF06Nnl(anU (DAC)-8(sJp57n𱝱h%g1"oqXf{ {dx-d<sP8c`LiLq~vv3݇k4-Tru;KOSYzp!u=Dp]):Z@8-UX ^oxyx W'2W`]Ki> ohɐ j|ۤ^&h +ܙJ!pd0<`ɋї  Dus\ 9~sL ?9nW[iuڟ >j,.'ƧM;Ҽ{` Tȳwd| T*(EZR35mW>[! m\k$: ɯxD1X5F{a %'X9M-q4a1 eç;3bps/IͽذUFr\ObwS5t_|W^,09{'L0yq7^3 =ƒqaԒnrc srkSgUqIw!pGyI8.wgbL@5Ѱ5tÀiʷ z?s?{WF /=E#= wy N:ܔޙ )*Kɪ*_&%o=n&u {-}|ycO5Vǚo]cB*k( "=/81b ŞꭽVb%3_h ıU#ftQ;yWp50Kʝ{=љ4] {#(龤ͪ^j еM/Dr\]XF3{,оyP YĈ@Z|/4qC v,Bfv>%ֱ߄ֈF0\ɉD%$o\23&̢AOȝ** >C >tDqÄ /5ͥ:lBKyxBh^& ,S6v $f̉:$)nSH }.I&?㻼4Y;,[rQT X(K 3'0%AEɔC&G`)oK^HAa[Qr}9C,bvA:ErQJ}%sEBFQF7Akft"=s!!2)$B\im(.QvJY!@dJ[A45rT֣S!ӋIdAolq cz,7WdZxcN4qRFcLJIE8yJ2;Ĭ&ghEnM {)hMXp=0, S.ÿ7cWLnPpTdfo:'ߵpKfZAy9qdܜn.~]%Jxs?[&{ߗj_yf&9JقUߨRf׽ Ԓ f67G̍ln~jKjLkUs~gL):KO !odr @w(E!EQ-$᳄C&0RFl&֘ZM+y՚X`Ү_c|5q6')t#GI7:ܯ~|dJXغb!}݇w}ۡF4%b՜) ԪarF\pRD|s\@Bj" g/he1ɾYR|_ͺ.=o1yFxuP6MuCم\/;6C3\< s$o{K!;jīVn3dk8b -vŎq1&I+6!k:Y*C9Sٚ͟.ր$ l9*OMQuoIy@4~:k8yBq\;]ogN =H]6 ;QHyi*4)Bqť&~{)$*;"1:&6h[QÎ}co1tdg>&rA*+aebHn"='IF㼮]K"36YUNU!>plwߟ;4ÝK;E<;<M7l_D8-RwZ*]zx -ilQXGujJ~t~ AChA qĕJ}䐚ߗbZJ.&LQtr qTE5}>,;45Y*+yjl]H=fcU7\x6ܳ\ׯ^h=Ok="V[#z.ZKӇn2]$znvwh6I60fZq5%FK֚QAC2WŬhQYHaJ+qV[jޓZޏǟȰ[C5}>y~kO4="Ћ`N;T%߂*P1%(PHKк*vރ=PsXmv]E5oB0l/r3VHi;>3"+a\~؋UQȼg+tа'o%US9;_/)bm8Gc@*ձ-^I >`d} }DiGGtl8t _Kq GGd|ģ٦g]Qf kOr^QHm|I YR < Q~ȴQ,Y a=X6wl|WmX KUa6"D>}5hK U) 0!} <3C\:[vԎ[vd6a.DDHR?ܗJ:i=ULg b]㬟X/fӶ]ZSA*l*l*l*lUaMaʣ rIv'/'b O2uY3Wne஗T2Zo-b1U dVT5f Ksm.Zi6veˋktr櫹PBZj="X+D X+v*Eg> T}oaU !7a9kŚ>Xxr|-*nq#C55b ԣP>l ~InHXBMOs S 3+)u/78 c;+PgoڟG7w~Ko(9f};V(Zr-E8E˵ 蚢O35]Ǜt?ҩԳa{V5I'ɍ:2:Jۭ#ީ4Dczio"`Dhأ9m ,[jz_].⤣qM_F!n>џnhǧyS<[:@`k=8{ 0u BAMp!=ܩP*;o!M."b`";u1fc`. 3aŖ[N: ӃGtq%ݏzCZZg3\槻N^Oc.r8qMJHiHi1y2HAVx b$ C+իr&1v4[ 2_7̅4o*0 vԕa?Pua"vVZiT@ń) %g"y! bV$wH7Y6Obojԭ1=}ԥ(z⨓ŬmTF )0!L97^%"FfWVZ$1NQd>j ;ya2nbn̕%X[Ecmn|(xFe"䐣5;,t`VM4}p,*h,Sy c*?wwjXN ݩa;5;5ۀΑ,ܑrȖ@$|x1cV3" I;cb:߶%N3Gwuy.ϰ<BNF*?vԊ?v^YR-)4ls=U|n?Ɵ?M~/^]P _}qW#j3UџW-&OΥvS6N7)P%+fQܯѻV1el "q'L$y IZB,@v $LFNj4qu.&w,*Z16ޓ4)!0/BKr9r؈gY $bt8kI!O̞Gr#Ys;Ym K `~o_U4観FZ Q+p[ё*D Ett H#% 5d%% OCYmޒME>+i9'6h҉VdUVT7Wx.HK>>C 1S$4'?ę2P ϝl!qrEQ ss bԟ9KOhr%,7c3gd(;\0 PQF֚1l25c%_(|r"4Q"p 6Y%cՠ#i+H'If͖" E<)DJJ5Nd$P#t%pWbbX "k*Pӆ҆zw}'qlt\1EnR,@NiUնXYٵFc{BvD Rxr,WLY@"KooXa+hK} ';s`efJirT)L}۽x;=ַgJVj w/NƩӺtox7Yfƛ1;_yf$?N̠*3@*C~]0gr:ĔkRȾ1qf~ʴV5W%N<ʾ!ؐ;4 Qos &7dh_[+N6ܑd)h")E.(#g:BE&ENc6qZٿ8t3t~<& hѲ r/2ꪘvB$ ;4;|s2)x0d$5r@zOKr1NRGT, 9K6inIa朔EFHx2Ɇ|,#Vi!uv>M3.Al=smhV*ըLqC_5xJ!U#T#hAfjTh)WF= C6`T7PV0F =f$UV5c&yCh+9HU٩]if IIrי`f2\"- hdEB~2 ZLY Ɛ3&z✸T3Z!/y %)؎WhW4Qt>jCe0vޏn.~of&- Y#.. v2)*7Fdf!yh‚Kf'xmBsKQYR[@dBU \oBlb/;7۷M( '}\'!M$'Y)kLšut%g9JMD>]N J޵m,ٿ"̗w1k.E6F|ŁA4x"HjD!όF"OyX;m oL'宯hۍ~v{wEMS{+ү\6`I' Ӊ&n@5a2Ebf-BLָn7OCxN/g7PN{Dzu=AY \)/ ̫I9h٬SaDMp=UuEz"AM[9aXI{uY`kjc\(C% ?F rh鈉"c!a IL1~i^]};NHHqo!F,t˴{'|Sф(,{@L2]9sCb#Bjd# QC.Br,b#X:!TBO\eZ KG.L4@pzNnfej5*FX$"uS xY("f`0+ahMWZ!g9fժ(m[nhBU bnlتz-! )#m ,@EE! uBLIU֤N _EK{|6z?1". 2(ƒ)/4QbզP bk$ArXz>A'*p9ǍD9LKOCA3[[˙W[/)51ָE2*L)٪$1yA(ўF4-i3'` ^Ù X0:Tj204b:*;RCEE-8v58&eЖJmǤзpȊjׂjH&J; Vuf \2 ДeZѹTv()y)ݳ`RZ407s<AdUE8[T!3(EzPCVʼnEon*!-/̥솫ƶ(= >ܴBء66D2bU WjlqoВrhrp Cp#&Ty Tpf0"bd/dV1 }㡱HL)ɷq _} 0? 
PnP7@p>5'@g!w2-!bJa5'\YTxZ%9ٺH5rS/m4"k>nKHx_t^(/O0fʺ,Iꊲx> Ϫd.}W40\9Q3 Ⱦo9m#+<:XKUzlooԡLX[}ޖd29cVP!xs,`츃,aeVA!~9ѧ?YE*,V C"lGrW/jXϡƁ1gwIǁEm64 Os)*Ÿ>e:*3gcrM3֭8Z+uTi'8^TͦrW}\D5.Xg@u':|1 `ak`+p1lh%%9xu/m]um#&;N܁rn| -6R)mcXZńB|o#+ɷכuĿ-b/,00kfTkF%s]HO^ f >k¾nyPVzzSnz,ǫxM܁5;U/l}BR_fo _^x#4||cJ<eUZs/ 0{wõWt׸M\ /۳R!MzswAF[n-%a6-QΕtmibV\/K.CeJD&5HH&I?&+TʄɗC) xu ރUv\S9vr"io6kZ/C-@-˧ka 9=TӮs 2. v0Ç7ӯKT~z\yv3g;+kb?L&E91?~M@s~ ǏxkxثNSnQ^V!^~ruo|=YkRzZU S'꾧W!BCe-YVqYʉd04mQ߅҇VHHkz{}|^g{ǶQtC8lmطǰ\S٧y|5tQos\EO\ x$q BR{HW_ҿa64|H-yTX{E.~|< n=f> ?PHN܇=3^FF\=ꥬ5zz5ﺗi3U R"DZY"q4ge!>MŽ g%XZJ1.P9z{qBj ~f Ҝ~<",ID!dL"& w2xEX簁ЪF!yH{zӯ`wKP0bi_E9A .WwY?Lh@jq`tq "\brȇ|{(*&1K_h{Lr|E18Ik9mv3dF5KG]gc=P0emo[dq'̩2#B^)F/~+{7#:wkAi{&Q 蔟λ~кޭ y&zM!?;Y_VYE~Hnl ۶m7=SV#S T62XEQ"ΉI(-<0V--.4%ޏJ"|zCtit>k\7hޔ#t.? Qt=RW. tH(x/U!J&ZK5*V&b,JN8u2쓞8<]*,8З4CQ}Uh G"qx /4 '%Cv Yd=wTcvhqdKS@F;yԼWl[7$+ db3=fPLc,1 #`P6:H:T7F4v$'z.YCVcP]Aj]JXbKWCL6kpY@m55Le8~y Zqs ŀxR r~UY;Ld XmQFA( ^F@ *r+B s ?hChqt%>KnMz'}vYot eۈl3YM1SuW]TK@^W6`Ę\m~{TTkbAMTKYK%c@W FTѸ@.JݮWmңZ|. s>AMܧ4׭}潞G^dzD<7T_+J[qPF_ J4&y8xnB}a{Yo~vuC-ެ &@tC@=_W_hADe ] 1^V? b4lk˹> oo RYEĪtꏷ ˜ ui ?\G v zıGc}ux>1Md3)ٹOZ8qQ%bʄ{d[AʈIRbO|)χncFՙ0B8YN/?bȵy>xM3C-骄cu,LW`d壑Ē킋ⶐVkC还AQ+ΖaM,w1̢)k U9 Ϸh%&%?gnm>ɧZceg iFQ',Ia\%R01$ $> 1V8&(A`$퇑L:ׂ#AklU]J=#ۛj4r$`p POcD{I +ֹzeY y'9اZRs.] 5V~g}!w+=Jտ;өg5nuB+C$1%l&Ik) 2g>]43PE@R/PINTudJVIwK[@zs"/5Gy[˜縦7bkprpg׳Z.]~lJ-sM+s;"S+|~ηrֵlx )KWNpс4#ORȞ(5nH/Wc)u?VtaAJ;pϿ:K|9]}7\".u)\6yK +le.Qh'ϜM1rU9rAM#~9H3BΦ\PbC-a˙`Őb.,oUW""P,0#b@"ދz/꽨z9Ř2|)–að0 vu/ jJu oBV s*;D!"HGĸ cƆA҈d%ٴWQZ!Zn a”&-0m~tX;ߟ|VRZ px} cրt5\CߌX`OS{P `4g°fXѰnXn%j_bYj5jxSϖ?rHR|#kD9BX2]k>?W@P@|Oh;b^ܖ '-D )p_ Oy(2º( bl "(/(1҅F(—l0$5иbfJWOcqL 'gw#sU=bjpKΆ3s=Vos^zܜPGN4HcQ2Sx^XQ 2/֯ O4bÉty9!W0cNǶmR:Jn ҕFGCKgKψc$73b>>ӱŎmK%ټ-?sPT)O 5eFe)P-J [R$.xPj 9L { "O6?9mENLm5?3Tc75WMd!D3b+;cq,R%!bSkr,-xT&De2;AU^:YU͢t{Հt['MkmE"6s6Ǜ#(VQyɮĂ!?@%ƴ0@ %"Z&:o+d|9ysfS |]~zࢊSZTSߜ~sN G^ݿ?/'Xzd^K1 p,Lzx,u6졭|tLs}@m*BV2GOa7?GWZ񴾰<Ȭ>F o@o%,p@9)$MN!/^/(EV3%?ԣShE&xp\F,!;_Ge]ɞ](BC[jl"bRSo]{6' cR_ xTi XGAwh#ۏYy:?YqWGlܓ_-+ \SNbzYsF/uu=bԛX{ `:quqxWKvLĽ-c|,]cz'w77N%ӯkbX;OV\P#=ʧKkv%ӃWNi;jI:yN(O@Gw!yvwW>\~tw0Jn ߟnn l5򐮎,=A / /ͅBac 72O KƚIg@E;sqĝ1+{two`-! #IDXoq"﹞1CpF4G( %ٌ 87ޘEAb^\PB  :1dx0N5TNXGD_8_!ؐ ~{ +e=mK^T ȅF`7ϝTN>`J mv2"ѓ p_Zí8)]@\Ma rD@:6ܯv-ڰnuI0gh2_mӺ7oztoczSuƅҪ>/*%=wF,::͟hSrMQ[޼%Sfoy6n~ӌ!/kGr6zAM HVArYsT9TRDET.A QLUjڞs;WgSTY]VN</,`ޢ ꃖT_p7AWhSx]:hHG8jpDPس["#w|;~z'vf'$l;`νol@>3pXЎ}'"zv?V}vO;n;wÎ1k/IìCCi(vG΂ UPP!TغDLw'*L#WyUd+lUr{?U?˻3}9z8?<{sEfIƚh}7weyw`/]~+}LKnbs=ltzQEU"܎F8ץZuP=(TAkXd:~r=EaYF0sQ R<*Jя?]iNEϧ7s=9D![ma@(f?)eƋX"l%/%om!TLK癯Y߶#͐vحpgl\J6O/џ/yWvf7(ƘOG hvރF %81GK_xv93Dh"[5+qM9?~8))Jto*Jn`R "c4hLʍ [”QJ i㘕z;3-fYߜ_Ef3kkZ GnCBF8FWvJ(mjJaSbحwc?_&e wf'aFphyk$h%z_8+%o,?? 8^:"0X^߽:{re Gd:[WW@ܦߑ1 N-t*W8]_7s[`9`1hu |Wimz,)VO_blK})CbFjKayH}%+ FPzG*UWMIG0W,_߀{X9>^7gIjU؂qe *Wl'1ԵWYq$-P H SMKHI/ ߻t ŭC7$h鉽etZok0`gD*[JݿKxN OqKs8cǁ:C9vZ $:kĩ ) +t49,vitmAa|:)M~9R)b`f 6V- 6k" ɘ;Im<{ ʁ@c}aوf$uA(%{/ 6A9K֊➔~$=h`Ĩ\ZvDIhVE'a#:)]~Öi? 
@VjsQ*pvMi0b( 8Z @𒺖D^:BၫΨ\$Ga|,=sCrSu;;hjώhc)t;/} d;7\SumYYbd Q+ykʰ 7`Cy#C+h8 ] kPcvTA 5 JR `j:ՊՂ^nWjS3UE(m_P|Wg;;O*ѫѿng2ZZƢDטt৳3chu1St_Ɨ[?~t6!TǕWLB^͐ZKVJci<(hӒ|"#SuP' j-$v;9i܉&k$+es)U{F&&K$L=u_}U ,Կe:,i~]9|uc9yq56/C3OX}1y~˓Ok0Sސ?y1q+KSrZ杕s 2.45?t{׸]ZT"6[5aDw%lC$'̟h+'p]6k`:+Zul;콣u ᵨ.?=o0A w6!){)f{ZKgj{ܸ_%k/{8 ;ɗ`Mmi^"i߯4-5"M=m"Yr!OgLߐK@o֢7ݖ,FBor;uo9e%ϊWT6zH6P xrB8<MLJˠCcfÇu|ܳvdp\8hj @&V~HALyukmsuD3h)KH$=h3PFw !ѲǸR>EɬeM~ֆhQXg`z#7ނ-JnYUAT0"[?3u5Ҫs:8u l?{M+ !O /˼ (4fxkܺcbdzn(qSoLV Wuq8jҩ4n]1DXrC(^07":Uƭk8n@0ȱ;yw "(h#( DẔaTa_KXovDą,<ݤi W6 :⯸wS${yvǟ?8Бb֫@TAWө"tMX[ :kb14Z#rnڗ ۤ"2`客^YpkMkFKfy=ZԔ d%20d֣LM9Цe#>8:HXnQhͿUBZ50Sm`ƚ-D 0}~X?r>2m JM5JR]:gά^:z D^A#|p ɲsI'f8(ɪ1>K SNdM7_nmYvt[?900i4$)*IS+zooZLh?͟qۿ"pu_aL %OcUI=.Dӽ舝WN=U0.ypQ N'<2j5eEq苵IV󜑝3^`=sWcbQXˁw|&h_P( jvpK,>Oe.Je,OHjAa(\66MEsRwĂel7^jT*#0~{Oʰm [WL>:7MJHGa@h;x_\#2[~S_qSt[(}4K\/ [^5W+Wh诇cU]c+?U?0y߀|n~3]z}8Z.D55+^DE37o! 2}(_C_FX,:)h=:qV˗0-nNPѾ퍰B$,!it34>γ=\Ug<՚6"KRNE Lr3ˌ=S\(ʧ=J4]i,?= N<5p n/7KpV7?h٫o}S lGO+g&u:AR3N$%*!à<rw\-f"L?±}Q2} ?:9Oo|]7{ ӯD{eI dB%%&_TM:١&ѹ L-atw6AB4Dީhq*xHJn!ϕ֧a*;Nj.7T@c&tәkڇ?\ h~/#{AXW?hO]n1یe Qw raƆ胡)pPVbeŲźX{zslm[2[#՟j&{gZV&1 ,Ѻf({z'p1m6imߩ{1!POc:TB,qn&a%uE0NTFŖn"˥p*fE60@BpnBVbͱ2)dkh|Ũ%`ƃw9`89alyFh5,aUJiɴÜ)D㰺b1&3ܜ[mzn\I P =6לb ˌ@Մ\g!&r02I fQ !%S+ BYN\쨁AЊ45DCLXffamƭkԸ͊ňh7Jk|^7e.[ GvՔuAdV6_q43'0nK~~̵) ~鍃7_nJ׈?ݏ|\,?>fh3XN4Ci?%ޮh2(fbꈜjn[7렺5uÌU'D q%ǣ|z^ۊ:%f` @ٸzQj'LP1rT@Z뙿](#'Qx`=C=Ê3y N(elmEQxz'B'_5 >kT(mӻE3iw܋ K.Z4k?v&w[ro&AI/Sk)R}:U`cqXA*XʄX,)D*( Ld6mj0sʝѮcd =:XaKvIֻPI*GFV;q(V@6 F4Q]C=$JK9b}Bl@í"XE[EJ E7***. `O*3{3S‚"FMk P UgSI lypBIK1ݫ졤8Ko?6KAi5Jk=>Zm7B_E[9& IǡiNw+98mh1oKbaJz՗r6G_F{J| ?y (,' O)K6ɤ! NMcs)$ !頻a81c:OHtX&SIZ"ZX#wDMQWDT_zfw;g[WDO9XN}%Fr^nhQDGg\rRv ^͘ @;b9Hu;.E&Z#Xj2#&MPCƬ[.Y\Hҁ{ "2ڢӠ\h^4oC;m ȸ>mhI{jeuQ ѿ?G,rj9r;e>e'S*HSt:SEf&ww!"O=QP\2D;xD]ayB5ZBk+P1jѐ:בu|בu s A["(Q_ <ߪtdVVpF@.0Dc&9D#S*ܽM"5Q:@ҌbfPiqARH g&'$1h$ԋw4F1^mփ;DKyQиu q*j"K#O"ZRalqM= Cgu~wT?4] ˬazYTزv,:HC~aC ]zggQi,E~JDwKmUלvފ ڰ!R۰o7oX{Z6ۏHvj?8[ëpsa $ &Z#57eb n^޿9玶΁z݉B&L:V,^fYБ g k͐#Ucj! ?m2 ?IpǑ[רq@:Ӂas*S7}.>rq !8>0H:MguhtxXū[?8߷/4z>>x=?ǯޕzA~L2X0[&ϛqǵ׾\Ǻ-{gppЈ49Oa>uۼ a~{.#d∬;q߿/ˊQM(\> 'u1.Vn᧣73p?Zߏ:#y0̂fկ<_A7 MK)xElޝ_g>U+6zh;5 a.v9;W'8 LeTХl)CυhDRYf-[X񃝷8нww?!C~rQgq_5VB\rHtLXOyGʮ,VPcZUB5}&/M˜lk}d>SiU~r5rΩ)nMCRA]e6w(pgq4n7Ǩ4hx(h?e8Ż2_ff;_Xeq]YXal`%p0.J~(&pSɥw1[LE qܥ)1 ReR8#s0&FS\*@N{[=ԮUd'G4/ d\)rhgSfCiJ#SBuRj0J1A>ĸ;@/0w;"pK,>Oe& +r+*ǵ1ӱ # Rnf֤>;zcI1w;r kTbJ"ץ3ljqI˙ԸXq![ Frugm׳tn͗^_ xW,֋p#P :W~ݦ+"00-yq@Q_4q֛1R$$So~,bVfǫ1|՟BWk.5VnoJ6O [́j>K]x+꤂b)u}9HruK\O/+$_W EJ$%N_c/.ݕzn4n4(4˃2al5}EƖ%{j# ui|9:(N ゆ-Lg0Q/vޟ߾8K=[ꥵsH_+se?"L(v*p~I?_MImq(鼻Ddfg1?^6 ltơU;֘B+P{c &]k!1>|*$bgNC9OvXhQW_= ^jPRӒ-OS5gdz8w5kG(:c|`/,˽`A:diq}C ʂu6~FWvwu?uW g!<Lb[9?>SRúT22I*JNX)uwbHIXy0j&34BFi^j{ڑwTZJaU~J/k8H(aTy+ttZ 7-jmBRKTKp ~%xǍyCADp B[`} }e&͸2v/jw ` 9U!B~3{{ 9-Ŀ ?@'G<)cf%vcG2A@;2a3iƣ{e57Y7f!k"ϳ*6ɑY9eDĸK/7 9`ǚtK 9\j}\\Lıɟ<1wc牆Lyޖ-,@(ƙdbf3K/[QR+4vщRKGJ3k8LٿSt|+U\;Vt}=$Q*Igf4mGNWMZ@1Y Pi(D6p6-DLo[@ MmPGdZ(!MPHk͏ H~TI8 9*h񛁷\<̐680%ˎjT3=WFinMz!HX{Zfwԛ_G|P5cz L;_քvN0Ǵe\6z r9 q//h@ 84i夼Dff4)*R! QC6%˪E1^bcc|QtfVRF'n*0S5;It\hX+6E H uh)3do֓L%3QKF1WRc,(e3)+}h'^@v][q!"YTb`3.ݛ^<\;Gi>hs_??JoG.883nnS{_fRxVJ]Y. ^x,Z-g-h-#Ƣ8 ^2IcZ{~7'?\__;+7^}sӼ%ȉd'ZIFV'oSgПoߞDlq_xVJJY)Q<(vC("̌1 pDP xRKACcFJUm5G[{<4p1],НvYٹ9+]ge묻Aͬ%tHp$qN2Y4__jspom=z UYA_ ߧj.ifDbH2F2KE712pc9S𗝊04BTgAi%7PxҒ Ev`f&HA<6hA6Aime\V=ŭ Fق[i,(W5s'W7<У'qCOҊA4 Yji)'Hkz+)Q|`G[w m3m,MVCe@KɱrS=xx ,{-$sTVI`*d7PzQA4e"٘FC1)2NYԙы3-29c&0%əS: kCkNr~ )$@"'gDHV y4-A3MUOH7Pv@j3|K@VӬ2PQqdAyBK22YCg2sD\dz_ & Sj-L 0Sb&^rhMěBCіHB$%ȣF܆\+&ōp QPo}Ls >,MQ7BS`){e&wmnpK,WQStx(JnBH 3M],X$WZQ<7iiu-qNm(twiZjҒO 9s;! 
6dJmS: 4`ӍV'"r$2ǠsU>$qDWLȬ]*, n0ɒ3\i:#iY\49`0Q J>\_W'?}E.ܼ*9O7 xy|џ3UͫZ`ͦ<ۃ_?l.J*i>S^^b-kZߧ]dyi֝v!0+E[Y&81AdHr擶3 >E<32Q*_NgW߼~-3`h/)XD!Y'4B}9-"Yūŷ8o9iWJ86ݞJ1x&fj}D7417\Õ]yzo}M6j4BPogj1'Or_|L&Km^ Ӈz6upL.P{'\a'?^|zs~50ŋt.>ѓ-*HNY{'[|/rdsbNOOnx:P:c\I^£YHaLm}dzb{v5ĺ7xs @#Yt`+z};[M=v2Woƫsm-@4Cd8ӹL{)sOU{2Bc:7`]Κtnցu7kZ)meRSzxeוBuVeMvteLy_>Zٚ1 xG[!1ۃα{ȣ,簏<_Q{h\l{_ 2y6&۟T󞚇X&M-{v.O.+uOCݾo$Bpܥlлd<ȨtIN9Q!bl]5*$JvZ5&8ݓlS]²xzb전łVjt Z'Oc',3>Uӓ?؉Fͤ:[ӓgW UjtNFOOZ5-ȍmIbF3.ըpIFL3.8vpp&:5F=b_i?g?bO.%R٦ jaB/bZ]aVł=%'p8qr~Y~7i s׵|vNydl{avzQ>N|%' x }q?ؐ=X|٬^KnUG5֯k8Xњ@@ɯ2CQ 37])!gJO츌>0{t]20 _JmšwpQէrs>mvΛǟdՌx:M\d]]~>Prf7>ua՛Gݛ1mfmWk>ݙ;]z{ 9/]'-r%5w%WF%[8-G>6SivwS+f'S\uڷU?Oqw$ۭ%ߵZt?zPg {3% Aa@kL8xҐhTpx9K7١UA:<#8NVUԘW`WP: 0g}&0W`)ρ{@!Gx11[  }`1vg $,,!.-WYh@rFyPqL:45B;o 8Ou R6d_h~_gvC: _Ug!eLkM3=E13+шd,Da4 38$76-yFg -a̖\Ւ\R?CRnIsdmI@4@ x֛Zº5ʟ4U\gSfYȼ(NkXjux*8CaLFw0v;P mcAŸαd! Iqn8^:',9BɢdN%$-0! 5쪵YI9ITL?_>RLuZ*h%KOafFIyƋRTT`<# 8JҊ3%`/88hhTdKD)"2s'KikDRXiˁf%V 1 zxδ*@tdI/`֟x"zt%`=gvoZܾH}Z믮?B<}~XW?Я3 _ߍw%m_|Wއ#I्'-屴T˒BRN\#%uPEZ-b[;3sd$SW-;Hn$3kt8Qx\幤~puvaXhiWXN`*Vi %f Dv 5iwWu//>]P}ݽٸ'/\Mp_=tYvBp ko/znlW (-P;z\ڠ/h{I9VEDTb*+sۇ љ m@s r$%C~:srr.S4gmɵr=X};rhI8ӜEX-uج&l;Fx(V2IH9^SqӑyOW^uh}'y^] )E>LB3فI/O 0@RZMU45 ҇Jl8 %3q/Jű]yH~X(<F#4LbYHa0gӧc|Ą Pz,8),f>sq(2 9A"7#i}sy5%H'j*QB$SP/19b;3;|2 M7¨w *5΄ggY96Dֱ [4/qJ0fq6b9(N%QX u0NQshf5:GUXnuf*桂&)̯ `x'kP)\ヅB5jBfIx7Y\rr44Vc 9߉BnN0َL*^_֦Di,ONTGz >W,eW,7vϡq%Biܐ̕HPj%z AvM} lN=%e76TDߛQKm{ylG͆u Ld^ϿF2NV|KHep z!Lp܉;%z[?m0*leMQ9kIM8!)ec95št4c!*9 }#޻WʿR:fQYqeC/=j`D!y&(ĉ"8-䊔DSSL2i Ԡ@ 0aXhw]sP {9x-PVsiWB[Y,7!# y8X4^2w} lBV/ߘt"u`P:hǬ?cG&9+,,j}si }VSpu%¿J!~/cY$BvQkvy+PEY늅v hQ֌GDr#5"n: hx ŌBWuu7\Vhη}CU^o;b\Z>^{t\Upabxm.*fڗ, y.p$+m\}tn [)6þLAcZc?j L9VOdl?vZzs qfFt3Na^'KЦ0²+& SK6ci74x!.aMN/a!r\ck$߲1w*Rn25ZŬatf J?>2HbmS9sM,"1Օ 腠Y$VFUPr<\4vuQ \ E+tQ2C"&a\C23*}]?  2[ZRIf \9{S$Rsƺ֢$Q;v#w7SˆA#]fgަf }c'cMc\WJ_l\Z(EЬ*7΋2QAu;I׆0t V!q@ư !XÂb& ahJC)R" $M19oV ΤuyDV*_NdEQC/<-8)I9Gym)gK-Tu2vs=п_+AR*VLTn4J00!H+ 2Jf52x_e 1˜W7{jX)ťA&vt<h0uv1jA9H E-~hOj96[OB_/ѣOG4eR䌈圣}nJq%5yhݜT[L9Jb*ث*+q:Q*wo-UfdJOV#,ٛScIzA;l,vӁb}6:Y?"=zxg yn|r]18m! ]߼>6.Ըoґ1M6bcИK-.Z9%0*IoN;NLq M(ip@{<\QA HWVsg;]bM^t^jG͓ xyu_a+E_~|˟N~u; *liO~V`UKaަ[?k@4OpQ 7lo:Vf9I)qrH_M9Ïm >}zΚ~4A[؎b؞w$V& d@|0٩1K8Gѵ\7w{92us}N&߽1`0/;n9^xd[hzjdo}|/W7x7h`NSLS?>xvJgG{n^>0Ό;2GpA.J*)\5]6q)zgD=5 6^Ll\)&2~s#;iblCC|6kH1h>1O.P{<)fÃgIҿ?2IsR`w'? M͇Cv۶T+q/~Mz ]'_4r2c V4aYq%hDnʔ7lӭdX| x,xwL.'~ag`ΒwVdAs E;@IfKzVQ<<q+߽ d^5(|{߷da=?e!?jRTZ5?QY)5&x<][oG+^.6cW߻ )r#ݗ}(H)vSMJ"šf'0bKt|]]U]>rxk)Nx\ ʕ4dňn%{rդA7¬R"(Ma ssaY~U4~9ՆnU Ie+E129Ϲ]Bo"$Rv,9sr5W ;|PqXBee?W[d?0s7J "),S8mё5sܳgu^Q9$+t2rڵ4 f/Jc2\W2{05(뺒_9|/7gm 9Œcyo;QhE >EZ0٥r'qmCRԒ WaejG!ݳa=\|?gHw !@P6@t[P4'pR|ݦ̨k) >iTÞ֛~Th(Buaݼ+i!<86CMiuoYlV?unڮ9[ey`,Ţw[w,À|ip |o~84gVDN>~)SDMMTQܱ'{uXA jB2FłpErB079(ϣ1&:dzo/C|u 0%ңTYzB='<:ˉS̓JfdVB|\^O 'tzG7G9 s˃DCxWUN6 ?d*TWBiJVKR%[P>8 eo$SY2j'N"_ېH zyI9Ni^zN$ћ% :wkDgOہ-AuͼKvKPٙs-A^>;[_*zJ; ]);Нs;E뛢mG6E4FoT`\⺶[P)mH|09},N\*cȑ*h<~ƍTUX)[4Dl}k26=vc+SwVsI"w̢[[}gbc6ؤbL_&4LYus6iлNnxkFHKXThHZYe1;yv>-E]N;=KP/X‚Eʢ8Wx:=g5J2J޴rѢU'*1fFyb^YiX[Y]"~HժQԓ(>KZp'Q8;N@J4+ DփgoMUPzPH>Dzk/=}#Xxd1ەϿ&=ַ-T,kxbtv%HփԵxeG@wx?нD%5pH>uEJ|Z3GYiEX΁h×\*%3#kVD'y:Kav?t{P^;qx> ; pfwGC7zHkMi7'h nZ. 
c0=)5EX馒ua}iI5m e@|I9 9Ԕ!}Zka"*?ޠ2Zw=վ7'e:֑fx;+P9fq=okLH2\mz// XC#X9o~8F6Nfgyѻj_\Evc\svU\GfNX'D`Q jIJ%H􆸠M *UzDa.I꣑"H`HDz֩1I)'6%-^ojۖ/^Jw.#'~N*>kRr]smrM<#хPXS A*zXcMBa/HOY :ļt0ՉTApު@dQξDiڱ]З4uye;gݼ`ƣt {)]fSf2})NtIPE&s,Q/MT VT(SI"(#4IFHÒ^P MA%%d^^"b[L;צ/  J<*|@&(cQUv2p='h0\<jdJi{<m*~EPR9NZy6rg;) BBNHe:L):=+:(::\* ׮L$q'8(,S/~ @x~-\ u""eWuexxk*G@_hvps~>|\zPe,==x-x>?Ǔr p<{CBr'?L&8T~M@k^?NP67h+y1Y^y#oSLw9p&a~ /_6 n^"bٓm%2 ;A5d4iqϯ';ϓW6!hI3o~,.PR T6ryNqԢj0w#elF(P-J m_g&1bN< 7#ζh?w~稿ލNQd$z5;ѻntāݠ 8 9?Y,M%8Ԟ Xpis xofח_/$ϷYiiU}_^Atiu((N6} 䇓+7nA"@9g)W!Lsd>_ gLSچ'ԕ/[iIs(f]g^jAu;%D>^@ T6V^GM=g(5V9 }@WԹ SV(g?p%P=! ʬ筡U)\RM|]$HOǙ\^W0CnKℜCe6BApQp!Rɬ]u3e chp1h#nAv0&e~ 31*`* AU 1, J4B@V ͠Oa%>R⇄"˴aЭal0_HdB0,x0Y-3rh^$SF"ĕHZM@%a"p_"a Tw&((Sw1S ܶ/X͘7n@' xM86Y3Һ9h:9)uܴgAz>" Kʰ&|uɒqӥLod8Aa @|!ױnj$9q#d:[Dp7 `i9e0zQ)U@|vvHo;rR{4"sGո;УwAԋDz#PkzK*:]EWZa\?tjni`bΊw/q\,Y~_ wzJᵬ G{7Ф.EfŌ"~k2ZJb?KP|ҙJ,(A nd$SV ߼ZnσʹyIЏI̬ "5/GJ!ᓑZ*zbBɺ|b!D5}Vzg@=0`F7cT@1t\_v0Rww{ w1cV~bvE|x4}!KY ^i~6%XÁb&]nBK"ET8`BQ},/QmQמe(nu]Uc.5m-0 H#Ȯ@.މOm8פOFqI3U]<̩& Ok%Agl!.pat%S I5LI3 ƸEŸIRT1HtRqN)JLd>ˏIn2ו>((˷F12v Ay50 ~sv=Ʈ, Î&T/V-[$X"TA5Qo)T0"Sm%}.P )O3"=JҀxwL+Z[eH@5; k.AJi1Qdș78F>BoSg:r<>&Xs,\r;~otFt^phpQ׻8RQc @ș_<0111:˜vnA ) ~3NXxc 5AZtuLag4:43d9n2IU&d`4me]\=, m}9w7G*c3h`$X6Q$ ,-]Twg2M,Lo!|olɻd3`K/-g3:D%ҢM2Ar미QRwxv"m&IHLY6vj~&ϔAC0әH'~}6]S,frLVqkoItx3jz F3YN>p`ΰu&${.~M?⶝]ϧo>VOPm)ہlG66DQmKJ91-CwIiwW6 `a[T{[ɋu1S_ۋNS*Q~>?*śWgŞb\]~Q?N~>3T3\|yz2şg>}o۫קg\_|.<1HXA&؏wu(F__vn;o/;#-]PuέǓ=m d.f;屛9.{/qkn͏wACZf7F`0@ߺ:wTM1s$J7G'[ UKxN٭Ț/\]mW(1!>NƳfMH~i]D@YGUgän3ng܁~]ishu_yu= &8zFIzL%W7C̍ BOP hI??v8>*{1 nr&fN?@MvN`p:M={cIڅ0WW`ƃ2xY³!i}n]{y-"?2M~< ٴ\ܐt6ͶT A)J~BnR+YN_3biW ڰ߻sc Eh:c0/.6o;QD+/5bR>bA~7c$$>XDC/P#2k$[񎉛*3ny^3f;_?cAzwc#/VkzYP3i&H>(P p qFઃ$TNuy] 8YvK)?G .uKt2{evp(׹4>S*d| 'w#g$3~7hnIsbʙ&_3.VaԬ2 Cxi7įي2tګBD&CtY_Ow쎤e\GQX 3o> X% B! BTY y6'Z9b*  $ UFqG*pX9 X8$Z 6.Ʌ|:{pJ~',OEf>KncǡUmqg*5qߩ|X#@|wz`Ncˣy2ŖRREz̿ 3d+% b$]XQ^uV<'0fN:ffb*;!L[tX;`h m^YhWv׾Xg})) l\z,ltzɯ8:n$脱qTs.uv8JsNM^s))9TzeKX)*rTx\F]l.]?!ԵTbp4옅ǺmM^hF;i%gtkKɄcݬ.ce*vWlB[Рg”bnfeaޛ| .6Y35GMZ]1ew ӃncGtr (h]`fJ[^`F缚uiWrr՘$''q'V_ر;>Z''[ IqAaWb} d7G^<+?>`RֱٻmfW|y{mxȇ nn&mQ$RZv4~Hŗ$Z{m֒iipf8$g P%JRbT@ ƊqV}ܷ5fo^0F߸i!$lS g?^C%oo|w3BwB),fmF8?cGγݚy[&}NJxŭmWe6Ǿb%47XsHb& \g Ia;iNfn;lP 9:k&V7>}?~F t7&cL7}#w}$h$u +ȮGF|WdL(oH8Io;,( 7D!َsYT+jz˙Z*ygmW8;L$w;,fi_'jF˘{{vQKfs`4WdLM#H>$iJM`H@>HX}ќV] Nº Fڨ]ּ%8KKp @`,4pMTPkKoc=i!B :5T0{K]Vp@qWq@Ըy졅uԸqHٳޢ}]^}O§zoԢxF`{ȲܪbFέo}ĭ'%Q SCp(\mXCCj;qDf,/oQL g˝G͹Ըi#%{wYvzF dv9$#ɅI'!lÿǨKϿcדqަ d{:w`'TW/r(c;dz@kS:5vՓ9Гf菣5shڻp"F9z*/1j vψ*,]#Y#歽d@#r/ZV=bbƇC$=L J%$f]ͥA  c8F;ήً'X<8ր&+M0kh'1H4)R ,M`4L[ͷ-{*@7KU[bu )"r Im}g^qw&[}[o2d\Vp*{ {_cj:L|HNQlJݰ&SlLp?.&U8(<.vJZk,C$VKc9jpfV$/ !Ҋ։I`SZpkZ48'~+0>֊s+J$+^SJ '滦%ҷ5)`]'ԗR ] ΒP[Z_CkZ!0ZbtA%Y ꇊ d#H7,醵DiCdS&h! /jCD#bZ{ RKdQ[odJC++fomxf 4-'W3cnޚ޵? l9 g.Zέ?Stv=0ʗ(g:JuUQD!'5iIk,LC1 O-:QؚQ>j-KwwͰ2dY0:0걕} 2kV夯޴~5f5џV 6=1&=x4* ~[+C`RuR> 0'QE7@ҎrtgFC+ Z5 IB#ꨥGӣ0v+q:pM#ilۿiJ63ala?=2 `@uVAh FA*Oh;H(rd42KC i"Qd| 'Ю«օa d5\ǧlXR'A8>7$O0|:>ݤu2IG˾zL?1Ǘ}wxz|ѿ7w~w<820Cyҡqԡ*0!;1HsW s|#7Ruw֡DuHQuaZG = :4P%Fn8#wN޿?<Ż??Zy:LFN۳OIw ]w}7H&:] wƓ}OmP|*~z3]4 Շ~=Mq}hݍc~|5-GaxDNxh{=E )arl LEN/fRgs'8>vOLSU:>1zvf>C}wYSޘ;~SaW/s¨۩,;12B^772HyQxdٿg}BZ}j5qqk;LGq!wzu}~y5oW>^Oiw[ ۨ6ئd>WUy*[tè3`5]LF|:~kD/?ErԽ7F\0.wCKI޳Yկ gLhZxL:7 g .RcN"=h0f8n_ Id?4ɟ?⽯WcÆvR`ܩOe+UfeYæwA{ _?ٗa2\1_gO(8 ̼i#S7w*[~"/+<;*!8 `HE8d:Q1m`[UxFVwpr_oyXJJэD1:R$: *H! 
6cV69n˔!670>pj;2@X~G( C*WXɩyYD9m^5-'}VeDvBx$r#a̛!w8v$6:ѐbNb}e:zyĭ;\Ÿa$xm }+tusy|׆Sff6aZ!Ì5Z`|]Ý.X'& ʮ+~̼?qmBm`w߷[^dz+۪zAI8@(GO~z{v;_w|t8Ԧ|[譍Ѷ8c4 g@civ!k0ֱJTJ)J(2S$R}-^v kBޥ46o]Ynp滰z{z JQoZ" A\ab맲qDC"#ƉfNb@C *A5ptWɒ:U*(faBS*J dVN( TVQ!,F+ъ%5ge| ~sc1=-?BѧJltyC\,@dHXIXDD'**m|„KHMD!̍7RC&qJ4gZD$:~z_44Ŝqs.1)6@Xj7XN$ƀFbdu0V+!4J qP]&u2ETX0DPY=t۝Joe;5j XHCIa`@f #41jqeʰWeiA9Q+?xzdLF^ztr ܠ0,N+m~k3L.S n@"ԭJN:;lGqȖebH-YϏ<A̧ܣ[#bH3𺸯a4ߌ®3rmpgMf`9UnkCmzr2cT`,{s,2(~Q)ջ7&Xv~κuuBRrL#.+mX QE+Vu34z`83xx3)v7ą}6JbtRi0P'4 REö+U5eF5Б;$7Y: - #$Y>${[ CDϛjJΖՎAan)Ǫa2?}ayG8 \R,]j9 D\Ƚɰz| (֡eoi.36v P [IBZkI!]jYahפ$rA^C8=Bu]Ҵu=l $0 bqH`X,:bne'Yu''7arDWT2-];Q8"813\v0G 5/|j΋Ђ0QYbA(u/38#-| yEpEː3%ry˴PC\;t" "舸 -|j,lE!s1+ОXHg0H;/B SJ\hoO~m&_h ;~gtv IBO@tQ}'S-gaovF!|S$"C!"sxhbZƎ 1FR$8X"ZP )ܠͻ~o݈%.oo:]I1aLT%b`;&f6 grW%+OΨ֨ XͅvDYc5(c9rQ ~<Ջ ֺճ.L继5OKw:s73Ɯ-ũ _=֯컋>k^_u=>9m-ұkx ZZ^VB{!r:X+"0Riq^햻26F|?I#OO<=5|wg ؾ|VUOmb];aLAF._Ն?TcB2sb7ʄV3akc 4צM(dh1u$d:GLH(jtȐkP}1C5(q.LW59h3Œd#lCJo{N $yr2~. Χ?HW/KUYtDzX>bN !Ӟ[a0*7 Y!FX.vFQ-d!봌2TRMKRM׌mH5 1w;r kVi*EZ '[$-gRͩƈ`l-23q4ƈI.h>Ʊ_@#FS XHb .#a`n0`9k.4Vp53]jV4CBZ¡&69Imp֦3yP+9Ő4i艭@RQV>_XZg *N"zUz}?^E K5`@ˆ}[qNo Khatx?r74FdM7isļv)@uz4v[#3latF̸XE֛Q8N[tF׉_tLQSfsc+|! S!,Q s/d!|qDJ+=s4s=yYNoLl ME^ד3o#o]jDxDĝvX칓''_,<0lƒ]`A`` R}wxP`rxm&?;10qH,ztJ1;FOS=cmivM]>}:i<ϣ0iru'Md~teF9"i맟}Z8휿E?ŋ7o_?}uo_? tO6f~?˥ۋ_W^>}otm6kp,{I=?놝~vOr YoȀd>Â=V~=JiOsod0 k DڹqӶs~< e|bxQ6)<|sɕ)) )4Ewvfw[&S]L:8S6VgrsWVfgG&i~ScfcZlÞfg:c8}7ϕyeݩvwE'vt.uٛ~@5[ws>kNo5{5}x|fn|h>^*`1Eˡ_p_> FӾ?Q} Vyvo/Rw%\d L=LHts'o8|Iޏ7#u;?;k|Ol3)m)A"gx81;_/zX>=i`7g 836epfBYQAţ' [jƐY"q -cTp*QnaɝUL{Yo`$Oͤir3Ϝ/hYH /!- \s {1YŔ}gs&(t.BgEa4 ]g=VgbYK8 )U-taA]ӳW޸Q+LU-  C26B g@]Ra)IJGP)(utE ~QA#t1-mTd3lD鍩DqR;~5b9W]UYuŁS6Ɂ9Q*!UH̻=-DM0PD'NLgq,$ɗiw?@Xrp0wd6ˆ*Be t`),t ceCA11e^F;R2x`;Řt QUJ!'D--'',iB\O[1WfX\"quށ*trD nΩLG!5޷jEDcgfr?u /ϛZeg{qc2l)~tg1_0YGk$?ՠzG+ٻHW錌<", H'B#zy xJͣ]ͩbU5k:+##2P[G[0xChF-G.ԅ}Le*2uԲq7I[k!" ./b lC[cc*9+Q\m|dScUE\oM#J( Z]=7>æ[@Q YDQ$ʋB"+WQU4qFTR $6@TTLg8G*jَTuMSg N65̋b)(F`rbP50e)V75B@8#x|gBtLS^z٠^NkpɈWK`֔w ImnznUN-r"kK2}ݪGve 6%?wNZC`k}|f9a״۪7WeO-%ӇcY[([&|OJ~7G-]cwN}tO -9>: 7~zqzsY>-A+á>iPNY-:n/.d7U Ъ==c9*B`D8&V2z_gi,-ZEXKFGV&#$aѝKb)vK)€xv)E~[JFT?)]%r3o=!BMCwI3Xڎ1 !fFn#is;ME .МOl= "ZO'V$[3 Je;fiYeKɹjt5Qof8LFr+CkDzu]uqYѧߕ_*Նzcw7jzNۜ`;ИYt|aW_折W?E`O<6Ֆs,nL*kM\ڙ{31i R!nS>ŐQ^%$;LeS r(;TT5+Hc $*brF!gȩޒUU6rr%0dHNZ#Vؔ'|)k; >?)˫R'8 +hqaZ hS1ѢYs`s42ꝽDpYaӚQ"&}m>0'1`DHZ\.HH579FJE8*r"6B-sWcH&St 1̬+?kAXMI [E_>H?~|c_P M۟5VcOommݺ=7ŝ/+, U-} #PJB1E..x NBle#cŰ5N#o9cM(D SK*3] ڬ*Y-lO쉒4BBF @ +F5VҚHu1˃A@UX3); B};tuFp$|6GT˧[*C쎁F?Ao.Ceð|Xk̵MQE9l1DDP .(<IFUj RȦ3d8RfK2 /1 E"GY[3DCzLdɫ"ZJYgߒKZrvĥXWZ!Ǟ0}W8iBI\v쫭hQ QTjN"u:ŖDYmfgšDWO"qu 6!ZcJPXvAj(4D@iQ@\DJQȹY4ʞ._Qwm{zTZp" d\c4Jr y mVV,CE*"7*X[qty`zX[cL#ƒq]36&;ZP #HS+HD` OLmg%/b0)HޱdDOa6oa4b<ed;ƴJalڥ ̶ۊb6\H(&NE;rM*BL;0{QSnϻv*Yx[UY{PU$ӥdwqmr#Y1otjDkl 8xg|v{L@Gvl֮JʠXvжvEf$SQnEߍE= ԡf5;\>=nqh#6i/޲{N֋ Ԓ#& ;svˁCUM zqLYɦyƤc6 >v:P&l /GܮfS%7!f1׶XGZFt =W 9qCu;F{z*`M!@ =ɫ A qz+FL@Yu5hDT(Vх\QcKh'>jx;c` b+sVVi-^kW4VTU7rK *ңx31aK5 >VVV~k?:*mKuU،^ 0Tr)Z R]T4EkL9`"s_m9Y/i֬/h.p5-Qnp[N-]XOEılx*cט,~J [ B;/栃έcp-mROig}ܙBl͊n>ݩkh~>7Z>_Ofzorky6.?H jȷ _kB6h Y,iڰo[Ra{N? 
ׇ!.apˎDR9>Ewx)W9:Ϟ+4=\\l'Gߟ~|js8K|>MpѲAĘՃWufp483:1.]ݘoKo<з}5=w AO?e[C~Ow[#4 4_9_>fW}Vlb2:N%GƊZbBK= =H`Ȉν{!sJ ZV=$,EI A#XsEQŬĢ]b Ҙ([efV=Inqqy"E?\?V\oY^݋!7Z)2 }v᯷~g6Ö0p>" 4(痟ۍl~WZ 9h-^9xB6y9_Ebi6","O?^Eص;\Ak:(Hzخet|F@^>qӲ~z&>J%mf=^T>ݶПǥAxT.Ϗ.c|s=5tpl1C~\^35m3ǴSZC84r~^OGLRO4EgVg^DOdX|$=k11/LZ{Ʃs|7Jk}ݬ9"K 4 KOzQ8ä+܋B;qD"OzSr:BOh1쾪%U Nq_oήꛔ ]6mp}Y(=_"֒DWǞwEB;hۯCOf&ߍկ2Fλڗ@Iz7?;p~{ߵF޻?f-l3Hh}U3Oh޽g nvfG }(-ZwJ1KN/f] ޾z ]hwbp}TKk&"Ųݴ~(׷g7\oۛO7?4nNϋ|u7.D4+OS9:ڶVG;__N-rhWmw>;q=l?rp6x 4LolO/s==eٽSwu={gׯ]]wuMbDAT$4r߻{SUOQB %$u+? c  }[)_jIo,j9[|qܦO1ޚ.T)hR.ڹ@qSDB-ʎ*1l&C,z¨B%sVZOvvwem$Iz %}0m vc{D^e!I$)ij:X Y/22"2#B`3`r#'fꖞ]iF;t$%qF} 璌KuJ$HUj|`yoe/IqƧDSOt*&r9a/ǓOlxsO`"I7D3)͘/?(qorL|y%kr>pXhtDoH>NV:e*R#Ր;k\WhskvP|Of5A=[-D*N#S.N6g}nGNWos%(.W؁azܞ/M" Q`0,x8$^Y3 r^\KF77*g8VPX THp X4J:=+ ^q6wiPQq'j=  aXs's( E"o_1r_Ex+>S pDA&No3NacX4I8nYeq Ȱ!V1m:x)Z/zsZA TbJ<׭~0_}-ʗS ,CBcf PGNi[mBl5AN[=HU!|J3T!D>"S{k@Blh%[Nڏ'] W:@[cD֔y@Vl0ԛ۹JVCxe5'A7 !eJNsN5T%-Y`M\_ p(њ gVd!#r74}3%;;iZJayoP1-Zs?hےv]I1Mx\|c~Y3sh-k: ?[o.$>fڟ$p0TP)ڷuo?JN f¦n9RN(7+{u+Y]q!6nNNX'!4ym< 3$p%n&.q*1R")b9c?Co"U,D2 Ifλ>Nb}ss yLKԟ]ao|X]ϤwJbdNNaۤ~tDh5e@6YTPĬdipvД´p(W8 SVrcA&?|xʰ Z{݇ldߙI27F8p!(ĸ%𾗗Ҭ,!K+٘);75d3Q? ?\HrW(Fr] 睁90@];YS tee^_Dc6ucbxR@$S+nl%܍p7JxF10 p Smg:4U E7 ` rwUHA#D"q%[$ w$ 4V +[!Z9lJ1FS fT!yfܥXT{90c+;ꬸu6;M; >gde+Į}XixIriOe$WBrU;B,'=XFz&!KbvќhZhzMT)B[c6ޮBҸѨ=Up %K{xP5S0`!U-HM!^X΂;0CynR:I _CS0`p+~YR#NO%Hk+&:[ԨDa昗:DaSB^8] IT(^!.ǎ A9 y'Qg\!>t/x/ZWx+oRTN[x P5wuĆ׉ !$LDM3B1Q+.[!#-r]Ev Upb~ !\lpa.IVֺo؊bH #gfYRo|ޖ,u(wcp  T)߮ʨBU&x͇ 0[.W@bigq9TŏJf bJqP_#JMRA׸=) F7ge$|?ϲ%fOpiI=w܈K.%Wt0|򘸇Fkt9N$7*dAx9wI35W3k' ଑ lMp(A<瘝g v봷! sI)NIS;X~Joy؀~=Nm99¹$3M<"Wx5Ϣ쎞do5A]hrF.8e2,U__Y[h3OK5k)ZZn&Q#:Ȱj$\*JaS6515T)>||Q\(Ggrg2KÒꪓ Q-Ub*=ڸ^,CGj25v3>c׎q-> }!L~m9\\s: |OEl!{םtl#?>?+Wљd D5PG*}ڝYB)lUP 6+PKA#5E5TsLEua)Wk<ָ䭌K T8Ӗ7|+hk1 @%۶fVoOZ2@p+zü=>|U9(Z5Gjf{gIdBʳqW8)(Rn,K82+-,Vv`hVޜU#GgT?L/?8h> *J[ mO$-aOmR9gn+lg_wV"nZ q+RO7M#UZoǙڣ͛'44B e`|y?k̕զHh  v8]k$Xjc * ;HHr$QMT9M'k3' Zme?Zegj5e=暈TK%YreZм8Tcv ` w-粫1Ə@+Zr17S* %x3 u0S95R]vRR]vZqU'F=b /K2v/U}$&& I#;6..ǯkddk0ڄZ+ԝсLCg=׋;Z)|wW7&b8BWGO)Znz<,z| ( c,qb}#Ӣ2; zaFHІ#BEƊ%2e,H-0.N[.-O8888ɫ",p/W!ƅ?q|-C2d[ |Ž-Y]7MJqYq\j;gOda\tҽ|O|i2h,Nqo*(vde.!uIDN? 0Eٟ/rghfn ^I*7tG?Kw?Vear ^T =8&cd|ɪ7P+)x 9XcN^#JU<k4i*dȦF^%Na$҆؟PKLo b-ɷ9ƞ\3S 9Ki41Z-SP=)Z)_ٿw {ߞC>X ܏[G˽W1B0D/3)5:=K݆Sj}-7%a3\6%a3\ 3622A:X_HuP5'g)r`ԨtD?Hv~mjGdmwYGhL M{T"E[e[ۍGYLѰg̽`axƐ4%gt1W0C@K9;"uF: [ps*EN+0Zb陼XIb*|C{U[ߡOF`7\D 8\۞c j$۸$H*(H08+pT)%:cnߓ"k9HVG0ƣ{q])4ֹc ¨fTZ/JYUNyHa ;flKyK3dE+$+GRX ym8uQĔ<);bdrpS|Q<>G.풋GGRR^9A-eϏw Lb)Qә+,5A1|Gf\R-6(D0cBCY蠘* &ueO1Gb5ľR"`%311EJSj'1Յ=9"CXr+F'%׊I) xƈJ;+ɅfHQQGwF3Nb!(V4(͹Hs(Dq0Pʊ פuBpIERƫRpɧ^%yvdʊmKZl7J%MípIx%@aޞbV_+x~|,}@SFY@_|]spÀ_,{4滷7% ?]+^۔_r%MK& z%My3΃L.ItLV5 9/I N†t"#.&.6.)SL#}&ep/,&&>ճ""\zuRRʔL7UsIJ / #L{ԙLXYF 1?D4CzNJϰqNadZ!(M3ᰓ=*@'%jaSS|V:H#㲳 I* o&aFҔ ø}O) j0aP$_S,8֩ڧ!a$a,e+Eĩ q _aT{,5ts s6J"2Lj5YA nVF k ARjuВ eJI-IJ`z0p!(Cs# mc;Lߓy=?YL·oEx X߻q|5e?c~Z8H_M? 
p9Vki>T-+L HtZկB[)҇zCPc^]l}u3ɳgEhFI"!QR;O#͵+vs'+esG\.jV#g˜ B& YD+蠟085 (hXS7f1B*ESޖgi*H,=^E(ao,XoJ4b#@Haͩ tV72(`!3%sTX, D xSYŀe{‚@78Rop_e[Wl ȭLGVƷz5Wx=.Hr|)7h]mG"6ϾQi-?WBȨQ(%|&E``00Zf0,I<M[U%I>A}z*Q0 S1睁HYN,?3xfk!CǒZ!?R\ΐJB 8ˤPXU!ada6ULI( UaARmS,OT.)ˤru",y_B1`xչ2x&~Th@x8GqUW ߼#a}r5pC_N}(X5+m= ИRYcNV൓N.fP+ VR[RY!i[e_s`( ҼI.R@pE(2EʂfYIe\b[tOpJ :ڡk L|XEbhE79EB9:})4#§\k2JdX qZPLQI97xdJ#(|8U|H"f..C+f NRuB=Hec*9Zb3ވf/噁s;dv7fG28?1Ng&(SWoѫ:+sӨEN}tsp9l"fBtmzy@@k[.irbfK  ڮ::ݱ/6th+%N?Ov<"I8/.k{ kˎT_T%d=TLCޗ,yI|DOM͎ ~SJCꗛ!r*K()R}y 1Vr'8B /w(49n%7QhOeV4ٖ䔋\%za03ih*ϝbVEt}vH`z+hg՜`P]7HX_Jr(R(Ub(`$Nũc .7(4˄lLd3†Zy1J:p`sj;:0P$b2ctW T6xZ#!ir* H.%W [<1QrsBPBkn^h@LnL[7*I6Z5^IxϵU4dT`Dõ4 6t!4,,#LI`>{ᙶ8o[RD`",c:BD:%ԮƛVD-`-t&-kBJb Ղ):umLɬxOYYudQ9#_`Ó `T)>.qo[fŗ/=y2\>'`ʸj WgQ }ލ&1!ySYh@NivR5`tfmXԤ~<GomƑ99qp7z~v0Xe?xHBhp~^6g4 !b plio|pOԁ"pۺ܄VWwbRgTUpu= )%py-6TCz)[Ev\](BXvJI,s d IӢp<;Dɽ{nD9l}(D>}iV׈&`[goa'U* U8zĘ@D!p`ga+eL+ bN#&tyuY,>7/[6X~/G("W}ȝ2oS]KOfI׉3dbYyHc6$7_kc]2פ奝]A6 l6]OV%;16 ]LOPzS[sΑʻ% g.J֗u1zYg~"ot8vYg.FS:R >iv|ݠ,bx4˥vb\ޥ4n\9?y cDg$TL*UN筂ӱY:uo1ES'co4z^F`McJr䧭ίY0ǙGǵ@gH'l15 /MǧtJ_ql'\/z#tɊYZ6M㰖D^:R!)­:?SCBA0)T> E 6#2ש"2n45&Bi%e 19d)0_>"ru귌vlfQ`\trgF]Oӻk?r[ٻ7$W~[T+LZ` i cg :Wma䅲Qܨ](#Wx,&$[" W϶% Hshfbt@&hQhX1^nunR Ҧ`(.D]P.wF*Bk3.^vIs}9ritK?t|tISFYk/yjQUUTۏ5 ZW跬V /V(Ng 7p_۶VŖg\`4j{FTpPi%+4S?SՄ)^_+dsPCJW (9TotB?5 *5[:5sJ];`௑é¨R9_ôۑPט^:Ȯ5N^;Dt5:u8Xf5hZн0;Y8u9)q5%/gpu")̽ 2f%#Tղ܁7W5Bp@tFw5,0ҦZmn;gJGV.tR}buL !Bޤ*RSA ,:z].Q ]jh[]#B72#L KYyhu|IM ;b1 p^2oP"E LFqe> `ZșrO1 $9UOݓ^=7Z:32(*G&E>rBl9M-V{a){2 .>92NHDi, m%#3ó$siƋS/Z3VU7~;YAa%зxgs.\◀9lAu/cǻ[jhoy0sz& 㯣O~\%ܷ< !犮3ӗn?οxҞ܍^?=5wH%!x*먉p aZ󨨍Qx&t ~_W!$M DҹVlΪ2w\niy&lշ;:c$R2ʗJF~󯷿!,-dcE]\ gP'[sˀ(YTt\VgSLpn-x֜]0H߹#[uJ9rҴ]p+!$&B}_6OQPU)sXjK2DtCUFT 9A 5#p3>Hw &c1R_pyJr\ C!)xd$zEۺ]\9152,%TFM8+3&roJ[)ѫ,TCѸ(2noOy4pPxU盕yJ~sd8;oc8/O1M>G#73x>Ob+ә@(cpza\xֱ|OC~o= YG>~`C콲CcBxtVua6u< OBRZe6-\pp' X@kmn۫\YՆ^n> ʺٕ4h!I&#JvtT%xCPg 3곌UBx3rE8j·D;~}e mΦTǽE?lY,5G4>v޻pՖW{o~<є6%O3]Rn氋s$&!#6VBn}tZn.r>*ETy-UiǢ\zE6#/JZ͉)܎zD&wĤPL:4;}uOjtwNZI0yD24"F] e{)U>8^޼l/}b9Yg7xyI9S5zR3تNL6U]]>㥽0 50d`O)N׉NP!i1MMrmb{6:ՙmZ zM{$>9#}$hi[$K->Zmq+T Z{AB.dL(( N[J`ZrM- ZY-@^t^R yJ@,s1JXX~O(8$Y~/s;E$0=AUMQ>d@ƢI#Ă1L(3/>Q/(兓)w,xD:+ƫ)RCά52Vf,(z !g\,SC 5+h?'b~K#gЫ0(bL5(jD tQ]4NIfQK:h{m:M!?/L=? 4Sc7ܓ(bt/<̷PKxlǻ4I*9{ {Ҁ+?O7FO_^R<.Vj.s@G$=B~-_3,:U~!0JeR|uЦlʃNo+ Ƶ{*F+A\@ `.cIS+e`;}{$)$tL$+h*0 :rk"p[`tVXph MZxCMd8#Fh@О G} NbٿBnHm%ITā@R KB$ZsFj)iWgt|?,жPsizl ʜjf a9SbA tW$bV[7+֫ڻUJ j;Z ejW{3rCHg%ߟe.ms5y"Ӯ_ȴ4x[۪֩A-Bz.M*zGVG jF8g;:W'piJ|IӶ& NF@o9 t{>k 濛@Xt[$/d:HL!gQS,*k!;Ɣo2k-w|2JD{ѸG?_9nMZW'8m2<,lE1(4g@a1$S$(&͂"H!8)#$QBKAپԦZ(a\ʻ]@ ,X&sB[rcĿLIgj6p2Y(M[(0=vTfN=@dGt j-GtrD"W$kG0E D Oyd2󩁹4I@yFE FйN Ԟ(s3-׊j+Eq"Ց-mʆSr:?wC]nz6=.=|Cq%O5:LQ"7KaF`E?ŻyAV_g {ɤqx7%6T$}j<`>Q:I$c''jRHNOA_aIm$5z\B=J)/@v5I㗙%4"H)y"4}AJǠtA@g|D(7~v\*iNIRYUEJ@P3,_<1 * pЁm^PY-Ub]ܛ>_$e%ΧĶ1gS9Nyo\#y0=?J&"Ir˨50ۃM3"]0=cayCܷ(6h9ȳ^ k,o݄U5Љ%*I}5U C Xi"[%)x ,% !$^ !d&b &ׇ@LjVTK>9+'ȀV U.:ϕhRNk9J<40VtK1K7'}dL]i7b: RibDD*S@x|ᐡiJ E,AE<`H+Y8Z"wD]G\UJQ3Zt,[_&=4Jcݕ1eo(7Cf`J `Gw>;Js6n4Ǧ_$U@+-)'erݽLZ r)eBJ"ڨHGǜt&@x(I)YȆS,%u]4@V@{M[ȏY1DEM-(MxlliQ`pr\Sz=-X Ǖ{[" TI}$ܨ q0g#I!o; };-d0)_\xezLr;?{Fe/ ,yl/0,,Ηm,>MI`^JUz,T5$H"Ͻ$/ϙcUߣ]m}~PᏏǾ2^clz[^IG I?6zboRSPI"LoM79~=iFV} 4Ij0#'OsSW8ՕO ACc7=CGH ĤXqe8'rnpb @-XoQ\{wV][` P"Z+ahn:⟉|LurZН'>~Ω,BU8b~棞OFK-NM5;3Eg)T _9q`\"HZG(.spMk\wDK.ҒV+;7>&g`g\y- MLpmz2!~to!MnMp.hx|͈~QS^-M`_| gv 9ʿ huy5 16Bt y| tw yɔt y砥 $=Ă-5E%AK5mR oD\1zCNKjV,8<Mqԭ-Ͻgw.,aoL7 '>nv:D^o1z4~Z-0Ax}Hat !ڔSqԲK*.+VlK,U/. 
٧<1-۪s=+WJâaU-^G]ȧoD7lndN;ZEDFѭѭy.ZS ∱D7!b:hNWK\EF.<䍻>65g_ <q|'\!UpsHzVί4򾾫 Ps4J홻Yn$|kWZ-cxsN:Zil -8`ĜtKCCq.N,pڒ37`L`3V$'[$2[ǷZj>CED~j \d)9ͥhS|+TW]xw":jрNt [)9S6)h͆^.1ݺ7G"J GGiAjoHa'- 鵞t# [PLW<zF0$Q50y!Ѭ6Z1,< 뭝- m?<$&(Dr&]-v}HikNUp\tk6ռV8l]J[`Sk/WAM`ldsi Qjj"X!p'#,PcKEeX\H@=32є"eƾVׁ͙4Ws_W!6pY1Fj8Shp\28f$iPEuXd\-XDS Sƣ+ S*1 |l=xZrL:%ty׬ bv'=$#DntyA5' >kT8g7Ƌpյu_0٨K_n*Oa?]r"d@f̙vu3 5F}Qa^ÍɼeÍɬ/1.M>qo82s40;f᧟kl͍5~mu76XPO {X*ieOwJsL++?? 7NՃ]|iɵ6gAUY 4 )ӏ2(vqRf31Rf 3Qׁ;ִ =VYe1()ySvf)KTTԦ)팒{1?rCj'*| ir};ڐpch}fu!oE|Aw0헝EJѭ,)|Gv-5idV|:7E>%k;@I,W=x?WT$1!rwV+,Xż: Uw.C9Yٍ濺jJ9'9$ݍZxK^[5++KZu#q Zc69眠DwBAG*ZI!ޔ0ogW+X(:TW*ŚR JBń%iMU b VJ]S|kMcsORtKR^Yr?ZK(]"~,5z%t#Sda: YpiO]5kw@Ƌ? 8#}i0s6 7:`1nChM@ Aa!-5֦zc.Z ZSB h /ϻW@+筪]A?})rM ͍;N` x.N- 6?LڧUe->rmQCؘ}b#VK=U8pPO`G5,G5/RXzxd-#ӄH٩~)ycֳTG0[h03H-ZptpߌTE!jm/so?wɈla?eb׍B`K QB˄)T hZXK+& ߓm)6(~WDZ>gDw;d PS {׮tWpЦ^̯k~0_8I???\Tp.O 68)Cq EL#*j- @(T@W2g䯩?X 0HF}

okA0zަYCܱT|j'%b4pkXEu|='S"<9I紗<F ) tfSY֩?,+~߭>M?[5[eյ} +Y_Z9l^]Kź;V%(+k_Ak1X_V|1VeIXj_j҆!X뀿a֙&ܠN+.L}Hny?az3m%V\ T0, 7#3s*C+7ܳ레&NW!2I cmZw*۠xc[ߩ[jsOX/t%KLj)̢뺲4͝mHA FN>D#)kLy__+Le >[~ %]bbo CD±RQꈖԢ?e!ҝbLKIn^eVE^iZӸ$=/k!-agz8_ie10Y,lyvHlJM_샬>;BXYY_dFFd!@KrYTgO)x< +B,ZsΛ%.~z؞ =GL-1"M,oϿǏ)YxW|c̐S(v1ʑB+PW D!S1\y,K<}8*2[`Z i:8Iޯ:,ԴǺԗwԗփ0]Q"ZNrQ: %XTIxI:44AM]%t8.red:t&̃69+&'L 䰪W8VD]Sθ<_^V|y=TN 5C2LL#%?X$80}Xǥ0;ٯp(9gd u:x2?Uwuh䚳&,P9ν!h?s*{@dXSLIm8JB\!HP!EBaH7Hʭ NG(T+NldȻo6^ϗ| ('?V0o`L@(T ?jgZ|p?[.Hb3A΃/]6F'ѦR,`ގ4e*\4;+:6r ;e6QQus4A;u: & C (IC453 t~6Yl@.0E,͏o)`4;G_?y˜&=z䭰Mc𯮏y7`}uWg1bj;FwQ|ʅ#}<dt@f!խjq2669s]VdtVBq;T#w^,A^ԼD;;?6_tfc1oZwx& [k\<MOTgd9-DGW@=k=8F sLoAҥKKB@TSxMJ4G#-1QNJZs/%oc_/r i^]bQ`R햫—sƑf LH$E,H"``-LEA-o}(j2GX yMUMA]oV"|tN*RW2l%BHc [HL!ߢWOt|,\d 'Ŕ}4k!$b? 2p.) 4BʲeDD@`` SDH Lu7:dj9۸'2a`:{AS ?y=gӁ%Z+[ʹ[,y5 }5O-`Ahղtϙz7W8![xI4dYGtaRa혥ԸՊQrA!:"[Bd>ۇ{W, n~ߗWy;of$-cRmY<}, MPǍ7'z\PEYzWwO?d(xM/Z@碭'%[uP5OШ'F|ó@P5(ݙ}q䋆}opou(Lѝv\ 93/DE?n3a %ON+u^)h'<\ ~ Xj%b/ߑwl_%Hp5_5`x#o`P@x-.&_!3__MlXKޑLdyA^ߞ<|%F$W3_|EiS(Nw|5yZt ]aNqRb;@j>>5]2(YivAq[DltFADq(A*1RNH+#&8ΘӾ4kQ;$ `oN5L$.~jN"וIli&ï527]>yGkW'ԂDT*~M0?@`ʢH(rTDɄM~7ـ0LVzc(G KLR>T#NDBȀ C*Ə+,w5rV |z چS,+FI,0^C8x˵?{-`]$.֒!t7kH\}??,ne3h XqfRT5ƌU7쓙7yqNҳz;yL)A%+ yϴgF| 4,#@`.'-hzAlf]b.ŝ $:YQJU}Ue4ds=KaBC#9?W"*kA 4Qv`-} N҂[Cf0D_siQ,)kLqPPwMksRz9_s~לJ;ycwja*9ԯN~,\#x4$HqO{4_Cx18_|{܉~  8ş,j:800 )˜UjzDUzT p=hs8l)9G:mr'  r|ګ1L)Oo t'xYJvV]l5>|ΛCmu8˂X 1"M$[^,reN늌1"͙߭R&շJcDZ:,w?YxW|c̜̐z5ޮQhJx3`8.Z àaAz:eg8zjrM+ /| 0X$/-1OR0"DF"@2Ra"׼?ޕqd/Luj 6q<). IQUPSlEv7ͮWuzkçlxܽĢG@6IǎNyb&#C)chj!1GnWPdT/UR8dDO]B&d>TRڒu,R%CpWYpy8z*3!y]=JWΑCUۡgFS5+iKNLb-`WnuSTѰ_dBBUָ g,XU+.|AKTb- waN0mM/&n K s 2pZfWE1qV|z>t밦Q0 ǻa/vqH|r )y{_MK[{t\Nz]NP{:Pf&Y?' Fl Gυd<9dYF+iYvIKS'>Śn$JF=/%[^IQb!9qHx p`PkN3!'aLŭ/WfX"`,qihc%xH))$do_z+"bcie_tEx%pḧ́^g:lJmZv}De*z2Irx# *99 K+,+E/)TF/Xg1Φ|,M/\H6{ϞHW&R}]IzzLNFHrMDRQX|ά8c,g!dNXbE:JyqJ yq#HotE7k8S8^ҤŴC%cIpJs ̒fjkcxGsKbZÖ]֎i(hHT.EJTE QR2r0 ) q? 18 o8SĐL#e"JfkX!hKiw3c-*['[=i΁x6gX{ ("pK4F\ehTb*L5€œW&q@(9kb륻1s((|mI^jQ/DXXƧhpICk+RY|o!HD7Ro~ovsx 3JM*d{Q%? 
JKiF)Ef"9BZ"Lcɢ4٫6Ύ^R39Kvo7ǒXkACdqIPb&*)H,g P$̎as%Rv@..$ E)iKRbI%49=2Qly >QPh"L"cD8Ψ]nm3UoAN"ѺKknjSOTO i˦M;̙xSjÑ\ʼ&q2r1H0lMx&@JthԇJÿiߝQ,1h|MI tr_Ʉ.9Mu.N*U?S$q|gkqR:Dz@N]gBGOe(g,Pն2RTQ=0?j[zPy>PY•։KSVx$  'Tr֒ Sw >͑ UyB"|<lVCj\'r;.4IcYJp&HuhXɝ0pB n 13)B6'F*W+=Dvj]jt=Nv[xP?Rcߑwo_x|SRddϝe|fdB7W)ΊBwzx,KpՏ< LL;Bj6Ci\NuCj[Gu 襧Z-n~ptʽ?.>N /7^nBZlXZZRq Y{M]Ys6}u1 t{U۪Z2^vhy+.;x;9leIc0xv·]w05=kgwݿ#݇.wGLUTRږ%D5}a*IQeuD$L$2L-=:FvHXIdwAV4_bw2O6x=!7ٳGC;wбGM|e ݭmwh(ݯI|s%ŬO%x!"O I$礜Y=E}/_j2FF +nUFPY &R|-9n>;5 PÔѺ!"/?qSk;DRXS_zşOh@FwGg^_5RTu:=U΁K=E\J'v-^u)*9 kk։&+c%X/>آinKsz嘷@^=%x_}ʡ+RȗpaNh_ zQ stWcH.Ip^L!zqv@[rZgv:$ 1yeVbL]\x[_p3+10vt164w]Lˇ`10,q߼Y[9q7S;/d,i)>Ro(7.\8E!e y fwϺ9ltU'1KyU%V.z7$!\DdJޗnBb#:sTn+/xZl[xnMH+ *̝IИ {r(N:^*q> 5t*{IWD$ ^҄r ){x9- v;aw[ݚW.A2EB^"bX+AsEϭ8+K=in%8*]= PM~߯ѕ@1:; @u9O'\&VZr/~Ohb%(OpX Qv~~r.~B+N= ?ɕQ\d^FWL?'4(Sg'`* ?ѕ 8;?#'\VBppAed?Y^$ٿvO⏫XHl2y /vsI ynRG|Pkǖo?*t ?*tz0uz堎>U"GzU;<Zx>fb#jw#_G"~nLgONjVf4+CQ)n׿ij(^M}TWS_Yf6MÝ7son: z;||o0tz7],f|zmC2g:^';51x<ӧCMd]ܚVhHW-'YJ(-iЯX]hc 㽂 ;G x  "Ý[,3Y>3&Iʍa$3< Kp4ʶhxm^YI҄[he*Sq,a[FO5#;?Զ(# J`֌(yE)#TrP& g:#`IbFRl3ZN +OLk,ь(QB8ܶC";BrwCd+-;1fdGcE% _4ҴY ",\ r,ؑR%S nƆ1W3*[B`&`[" h"Ȣ$Gюy)-L^$L 쌑 "zB]`2ΏW4G(%`q ` a $KR103$g&lfeR tK wpSEUv8{,F}\gIʸi0~am5"q,}{q޸"e 6h_SsXD%WE$ GE(r7>@9 `!=Fs(N $Apb%F Qa@rANi_$"\_-H {B A[CeM&&i(f d2LxwkR2Q.zktBPd"x ca'49HqIҗp8+i{ȎS,p8U!@Q^zi{ pw{a>ncb?\Uwu]]{WW\W|pQ |RjQ,cXb-$ͬRbA+xf@' ߃Љ.C'o F Kgj$^ "S|I5'ݑ(Px [lΌ:~R,%/w\j|[>u#s[>< oa _~i:O>hk}^ox(Tu&S2_>2G!Wȶ&9.=T"J+Wxfpf@.`ZNݓMl gS%~|3ph ÿ zd~ B莅{ezX5,$DY؄=tGw)nPicG[]|y&{KAhTZYƂ+Of:L\ICxkuC 8~]pNs WKAH|BymǷ ']^:xDAB5*$jbdQX(ƺnp} ƥmdf듮c)yF)R7<$RzBxH@㺻r_lDGXC~C!} 7z}\{Ka.VU ʱ*35DZ*E(/Zm{ee;X5,>.8UN-GwdtR Rq9Z rt0sTZ.tӶx<}.zFGX'>('~C0Z?Cs!2:2rg 9 >z{i@QyuvpHkm GthAN(/C B"Ii]pTwwW-ce{ jmYW,gBfi߮o{ѝdߖ8Z߃rt0Mq{r_9I}ȊC%Ha;*$NiCttj 1j,[ igE_!+0Mٺ}{}8r7#w8r7c>8I\%H*1$yu4ljH'4ThiµZ%̰ nqWBK tVhhBK0=l%GU9/wVTsOMKhNi z9!Zm`Sl;H [ z9]}$% 6 G0n )J"`lL(%,V15j,mp ɈL1] ( Ŝ$( JPB*)Ui-FTVB潪Rx?荶 Ttۅ&gWb*/$gEع o_edW9*CdMqf)WEv෫r. 
jXp$Ĭ ri`Ŏ`sH ۩K+ 3ɑfa;[1SXPvt+ `(#G.ǩ+aw }6bP`L̘T+dS;}njm 85ji<`ZD?Ӌx͟Ơgcax۝7K5뺅9q=L2ٻ6r$rw6!`d lG7l<ߏlZRb7g$Z⯊Ubj0* TR'rC A9yegs"w%=ٶ]X v7$!߸FTPƑv#=jX BD'U[=;n@ք|"z S]Rn[ nBwLJ B+rsXli͌oW, #;N`*9japq̌&l!M'aH|iP2ʥZ c̫ qbX]Vږ0ppSm&8$GgoB#n4+2j@b"e y5UX9e `A/Q^f3пF8#xY0T Q$RVC[PaQŠ0ͤ*|1 Gh/ WXq^0 Z*#VI&}$܏)bJe48:ۻb9J򊷇3t+$Cj-W@,!2 2H5f@Z#Uq b0wg!Tr1,)18lpR_w UWb_ÕyBHNcC(Ǐ3V {ǔut2tj~|JxC4~ch¸3 ڮ^VvOuѲO-۽reT,-)ꮼh}^^4s.Tyg8}"De i22)]5CXyrK9b2vNDfӟ$nn{&6Ke`j B%g̹)Jm2  !P 172F\_m}Eo}1S߼{|wy[)7m;y:>}Hk!-ԙ18 p D`Eˬ]])s#gKST99U:jDZrJ52uB  6Q,&Zm8._)p7:e>:ZJJY?=2dҐ[ɏa7rqtm>~l>('(ꝯ>ɭ{ͅ6s')F~zwbs>\R$t:=W)s#:1Q) '.ݚKB1z1!U6 A$%b.*A7 !XסA&c\b&qȼ|BF8TaԱFRT*j<mOTtxӻEAPR -bCR 䐷o5 ݪA x9P5}e"TAtmgZ BL D`Chu^I'jI$}>v#sLl;Ft $h<_b,64he7 [gg) I(кa UTXv^M2fP L6WFY&|P3N-YQ IjՐ悾xs,EZ@A3!H1RIS$4s:8}1w4Աj 9'3H@%f`Vm]-Hp B6x93A5 IK r+V %HT[(%3)Qʹ9VK̔EZ | p(Ž <tON`goBx(;46zɀDɋ?9ɫڹDv(-}!txDҵs 3_7_J^Y3/G/Pc W@ (8άR!7 ȁ-WiJUJQYEQ-Kfb:!SxP Ϩ7kO5~aTB_JTG{GT#Z WpU),BղǏUÖYCE ; 9?W&\x9 Z^yjP=*=DLa+!q --= CDZRОbX{6?yRFU3aUjd=Jda!u%OU"۽dո%$vOtέݫV2j8/'s6m^VmCTvXT(XNFCEp⽓!Co;zuxDB+RJ~Q8tG;` P@%P/<͂x%P\9$#Y 4+@22T(\v׍z  =9m^NIj#39pjR ]65>Hnsm",OwY=L(iCɱ={|fa;rR_CإTt2Dx)jOjgz2NQcXu̽֝zɯ o8a:|?~{&HOcMH#Xo;I ԝK,A.uK\j 9Ow'*'c?eXWd=$`UkQV1>UUDFEϡͩs|N[\, N[/vS[=JN܉F\ !,XwrpFz O9=S,;JzJMP:PxIf<ִY*\D.$3=d/^5(Åks-qa_uBw;#&qFpuMf7IK:%,IT삸:@*"Y%,(DnzfAބb䗰Bl>A ًRj|T(Οpcnj|8*أճAsr~AkI/f#*֔RcDzJ`X\XzXd0VCj)%{ĦJ/w}^>{M|ӷQ6ޏڧ$;.hCe*@J%NGbx$ d!&ZZDg;gO_|:q_y\a4nLZZaӄ@M0Qh3²+"W\py*Q"vRwU 'gD;FTK2PF@vii"+A.8X1ꖚU´醓E; }񯘰[  حPg2xKu[{nhUīއ7qJ[B@D-9g0.LˏviV~U`0 LFcHmWlbD9k!Șܲ@AִaЖLtG/?nR;rOӾ!XGOTcDGM3D 9e}#?JR (LYf2H23*# $Xq*daH2M`>S{A ҄*}k Va9`8 v(ʔSpl%L!\E a!Ie؁̕8ͩ>e/F@d]IA ُѫ~/M}v*@qQ[n`_r ?.: if6݌}smv#eRSn($A8.(pB")I )Z 45V8e¤- *i&Z m-RA~muu#Π$#E,J%*9YDr5HNeR:F_p(U+6<5d+aE%Lͣ*MNndei#4QdD*euҋ3L%Rq vf+VL@lݺw38M̈3yS)T#3pC 0DM2*`0Fp sh 7LHH~ rAֻSȭLe12]YoI+^<# av΢=yږ%(Q$%xf[2 [6Y_DFF_磂L?*Cӡ8+$DN3(nJmÅ7Bpq!`1r'y&yLɘeJ)(ȏBj+vÍPC}~9|UϏ~~}?_>=A㟾2?v.x6@3??6^)@R} /_/ΨzLck_>Qv7_z9yoO\|1)nvQ@s֟Rrh nKpsCmU$GxJd7w>X#(65t[ _~Fa!H6f˂אѓfN'ySY J]9~V»[HpCDTpt0X{ظq,HhQA>#]əP,9AGѽK#gHZE˳ WF M-=@L~pW׋m׹>VU{6מּ^ZA|> SOTodJ~$6I/MjIl!]N1+&d=N "2+#9ks y~v_nv}I_0H&)!`m~紺v[|>Ъ\(Zݭnln'?\'Wۻ?WI0-|q{w}-_[>ky;|YΗA53K/Kmw^z;+yQAwWdRm!HŰZ>ߢ*g%,KVɋ9T6@mXЈ&?h`' Bu SxdNeP39L:cd1aMZƎV(%sJڽ C\s8l9֨j{i-h<ePu&p3BѨEJT:fc6Q&$1$bhR` p<0Ju(b̠-< =z_1q/ԑSuKbcbkʆco("sVP1JċuK|H[ ƟU-\wr+͒v^K!1ޣhP)6d<[ӟXD>`  -nj^>%Ƨc?ۜoVv mݕ2x(p[X -.Eo|~.6'l-Θfdڪ"ᭊ8o=z4[=]w[Y]}B[rx@$M"ɭ ||p= & w c˸K3v-cA [5AF6ue=?s=|hmY+ Z^H(O0fEk(m|òjv oV!R GK⡡5=uۺ8 N=Vw0\8Ε*9i%fm^AiX~{;*>1 u_cТ8֭Wŷje(~':ҨHX,:B)Y=e7QwQA%XvLKr6KWGelr$UP][S*n<,y;F T+]rHKr{/7l(p=u߰2Jl ۧ邱MiA׀~u L,K=<\]F*22N27Qdz ju;H?B U`ɢp:r I`;.ŕObtUټF\iuRy8EU Q0Ǥ{ 7<: 70 QwGP\v@˃T|HqY~mҘl4, ݾ@16*z1jn7?5k|KT$WpWřQp3pIF;v](}(hhŤ4. 
ե;Vt\]7vfK+,PǜdB)% 'xP+SfCBnDm"ɔdu;F&$_Oss(<?}{;`%$#j<\KUwZٵ+dMnڪ:=JF!S䌌Ro>|Gx75B"C>P#KVQD5:T};h$ A(Ss7\A0a%,G 9Mkb䤹W"Cpׄ:p 4d_Wְ\ISϧ8"6 4Qa!:*_ܚ ];+-7kr 3C“8ɬId ÞW)i]Ɍk=(LlL8'ͣd~O@ J.I=6)+qϓ4Uq-@f9%]S)dշR *!+A` ( $ 8|HC@OYCcp0w::QF+~,ʼr9A3 [H/Ǝ xdr<-*| q%xjmRmF~ Q2cy}t+aP"|.D*ƀ:>GȫK%3THS_v[X )v43+{gyQ@zF=mC {xۅ[ J >岣\<לU/۲笫̑Kz@2UR_Wncm 0{ C> f /kʰӁ}J)Y+u)ؕPg1.LFH̍E87aD9zd/9U~,S ˎʒ/vq˨h G*Rxb.it@E} UF!S%^b&u1W\ip}㔳UZ8żD,ѓ Mڂ6*O_49 >N:$=.唌rRcbbda4+GGwdJ!yd͜H|xiMvUU!a8 lsBU?,Q%09Be" > k8{0M%\,)K(/9ao׿ M0!%c^7紺i)ӬtḡQe90:sBQUx9mϐ<:0d1P&Y'r&ɴIQ# jF'XWm!'C}-h0_ -@8A'!y]lޞTruB1U1Xh_kG#)ym`!TXe NHNTSZ$Ӫ<ũ:A"2b-6"A AXR@1uЛˬlB ;"ZnniHøPOΨݷeņgC\!*JewC Z-LZb;K=FiBe\UQb`T_?姵E oyǘ!eFgZ)6\h6ZLkxzlS,-m.5[YҒjG5qTéK*&pcЁQikڨFRLqi!u M`ZЂʢECL Lt k-ihOp G ;$ +9Sma̩ﺥl-Z+σg6s5I*CU}T6n ԨӫSXTtmonf0le{uV:s,McZ.>I'-qýEA]& zU#;cv𘺦[FO GI޹?mu: _n#,j.zvŃlv˥犀U7WV٧F#i [?C̛d˙0_ ^1iK8{ܲu%ظ}RZƜB2el?<*2*Wmh c&:J)XUdQba(Y_&B&|Lzk*v []QL0Ma[Z--щCKDEa:冰UMFY. xBX48iG%@ni.I2CSF*82 xWMڙՠvU|_ %= +ABY)ʥecVZ}7|]dPmdfsA_C;!{YkYHtmGm߽UVupeq 㢹[TD߆®ta?!0kMI-Gn:&'o4y}W)'îAj#wWD@b6f͏hSn%3iCs=>bl]y>=\wR9-o:MXé- 2jMI?Bj6AR-׷.Ǣ14^P2;q%\JCn"+\$[W.\˭o P>8*FSݳ jɪpAIfг7: zrXt/jԢz.NaH'[oA1>7CD<˴N*9KZ QiX jqAMh=pB;r{z!3+Cy]2Šj<ƘMkPr#v+XwBk'hTﳜP#^0rsxjJI9s0. b9D1T+jJWKrm梠 e<4k:BE. Rz{w"Y(sFx98XZ+H81Vntě|kPK-$[ޮWyFq^dCIgǒIͤ!^lP-F<HNu̩lSvK.~WtP6.jvަ'ji~>W|w}=E8_]wGn]._y>hA e%iHf[tq;](z-"o,7] ;ހZ0>T`vLksb"G۔`j]M(lcM-­C(keDOXhz%rng]Ɣ *EJOI%Nhmݢ*[܃wU[uǼ`v(GOTsVX&l5ښ^4v)4m4,"kBG!zbk<5#3ͶGhnN5<$P)d !eF..<P)í }`D^4⋒Sy}v bf t1 :i%+ nNJ>?4uZ?#Nz[Yu@ڸڎ33 #w\j?` Z/`>g6LݜJ7\,ף0mHR*+CwVv̢Ci2e,"R(Qzɔ̌5e)F]2/j00j}y]l"p;/l_~ٵ̋QɛTXEdnfxg4a׸Tx2jVU䃔[^_<ۚt@ϰ辖&%Zl.i=DS.a"U]c+`xocA&rw9_B2ky! 6ԿZ=)vb{b*pݱjk7co<(}3j#+k"ɠcJ|3VCo.x>)XC)PՑq:Am1 !gWނ]8O ˬAvPw֕=TA^zAKq?{aE5&7~K0Or)%쁶ppMyPk'Xhq2; ~q^v[*t C5`*xBkL^fVQehojJ^t_z^PR^Yl}M`t]uSx԰Q?'$rCx]W"* P70T{$tAHrfijGנ⪃>1> WFy#JҰ= 6p\o;T>Aug4ф^E5%ZWlf:QÉU:u(T )挷:djfwmz4.1H%>rD0Lp8r›9tX28euM'+jd[PȤnƽaѭ5H3czqkճXo_Rm)Ֆ>&2DK_AA[o5[rE@wb(J8ݶM|zL5,Fsz_;a<}{O8IR1_>~ t -j釅y7$NHAh!w1pJyZ(A c0E{S GiLb!V)`HΥrDAcՊ2ͤLH6AZS OAPͰ2j]*cd'!p)gg'$ 0A)i63sJ2ynPܨ pjNZ-P3W܁9Psj<>ƌGyM#8蔆qJ]wpۗpǷI6GďBA AK"J 癥2ba@)Ռ1II ؞z(@I=ê4I 'kOk\s-!*h 5E{QHL" H*1 o'V<9*U.IE3gY1eJΒ 6:uBt'!H(\Q+$4 *wr40o|9|"gWȀV(+GBzi$v4>@'C0K<֔ݷq eDI?/|<=׋.tLJ~┕d 0 ~i(qxp+|0U+L6/?B5o8z4BN4+9)V?37/3_|$p mh*c~rP|<|sAp\2p j`J%"چ~)c4PK=vvl/nv8ZlD\qf+Y/ڦ2(7I#dzЅoWD(lW&=!4{Gr9UP- "n:dDkgr 1*[qk2CSF*82h*!ƷPBs28N'e}JBt'8i_Oo(gsv_rSLe84Ɠ?ZJΪR)ryp6FM=d|*QߡBѠ@鹞p6.q㸘EN~lv2ׯG%otp@59yйetdTU,zrK1!f(əE4@ &Z8D_Nr=(ĕTr8p< 7D} e'/v9%LJɢVRe,–Eز*lRM!)WM@ĸ0M.٣Mi]21lcH ▶O{gA/c0(nL03\jG;Zm=_ 1h!AiCC{cDj) 5k\4RVe$ow~lg0iuv_NH`4zM+йAMN޼+iN[\P\0<וO(廾+_͑g@Lù@vHș$uBH@\RǽƸ3s)R>Zͅh?;۶7M_or-h%\qq< J%-磖$ cnA\mBPܬH4<+!g\=McV^A^2ʲdTz9Gw٠h"n2֐m)hi!VJx.ws1P j]1N,m6[rRBޔOV Fn [;vêDWWiMNf$5JEi>__S~qq m}\VQ|^9 9#31D+Zޝ]-.worԪ~:xqVV`||/oiI"?_cĶUp +d \#GEЗd̗"Y?nr]deomNm$ꙑ3== KTX$Ou{?__ᄏꋳ>^vnO7, ][j{SJZwth Yj?=:QZ͢aSx\sC0Z6L0'"0;6B%4PeೖIG ̕tqTaZC1L{}dJ&q&m2"ǐ rLvZTc9v@jlaY6tk)72>\]HΏe?wϋzoF|'r*ᨛ]18,GIL v5so{|# u򩵅b4ͱxnC0h zp[9"}X*j੷J_db!bUޕ]Y4~qY_?znDol%=$̷]'5zSp}[`HР0S:-kyxory}ң屒 +i Y*KoB&cUX+M]HP Ele].*oaq֠"!~}A1^؃Vu3%i׌ӷrgMȌg[BQJV5k)KJTC'iu KCVh)x)'Rt| o^evȚO 5juiv//0FX#*hxZ1{EPcE? 
Cߛy?WĩY{t8<.kin}7-HN;'Y[u::='"VMdđJ|E)Vkyd6y1arχJ"|YoCmT^;B_ږ9V)o~!z{AHX=_^Vͥ"W3ΊW9ڦ1vvj/6) ȿcz]OimVi'zLʠ:[vL](PH1)׶HH=~7͛zMss,TMdB+{FW3٪RQE"X^UxV%Vg~VCdy.g؎b )4IQ]?y̪rU2>yեl3BJ=O_tw; y1t=G3%+c_v9JvBFΘ=EF}Opm!d<V(!hDzlC=n4n:n'b C$D_}7.ëͫ_{G(!_m}xAZ# {O)?׹- hsr䠋@h'Xˌr5?Sk#(+ZCC˞m*aI%+dH v:ەJФ )c!PSa%:D(,LRKVvӰ2,@j'(!LJ䁕N2#r6Yj8w?2ݪmvaL!LAuVnh=nJ;ɹu&5NSңcEcӘZuwj0uϊTu+yVg]F^9r\7:)^b|?>{uw:SfÿuڼF9b/X%V^gWͭWD֐ӈ%݈uN _F a&I~ς>:EW9ݬ?w" _DGh12K09csS\Gi(N2"x8*SH>˫B`Yx% r^G7-#Zx!dڠ#1UnIO]BUgK5CN`) pg1UQrA˔1 aJ;/l(0b0VJ=. `>j<1fFbpHu/7P6*f!fodt A`EUHĞ#Q$'=jA##`t֢uX֘&d@qaC0ҍ[%cֱA8o4;(Jd̀c0ڃAkʉdUS9n/WlPl*K-JO89BUw_~yvcA0{o1? h-:9TW.I$U&)2-H85 VXjbx;XobLWKUg:Q3[u!4Eя8i$93P[W!w~P7yi?_|9)F\^#a_~Z l5鏿Nj3Λ#~(C:[?n M!MHCZ?Q+&TƓ"z.sÒ뒥,< h,FTI: NM%A$V*x^rd!+,]C4jGS '<>mX[i'Ȟ݋Aq"iTV<\]q޹˰E {oί V>~/yv}W ۏʷge:BIw!.x=ǥ.>q{O$_d뻨Qnxǽ>}SY~WA1gW%7{ҵ v%>wȷ>~?uT㬯;=an zAőmbɟo|x`}Ü&1^_iQs#A6ӀAJ(}ytº hعMn MNwc4NXGtPLnާ쾣QLIG~?^| =pffzZյZy1wvɧx2K8%qYfRxsN79gLy{u{8 wr-Ō*v{n['mOOO4&,g@^I -O%(9+gjA q}BV~nuzMaw/.OCVmb:v\?=1&&i4,`<-`Z%/@h0oimiy=p{L='VGTrP} ġc$V C4u1 )W`xTLT9&n{-Řt+vTphoMP={&JxkTɓHR![YEo'06ъ GkkeG,@~vy]_)ڻe{WԹFlMv-mתmLk5*Q6TJ2 "[CJ\MQ`̌4֥- dfi#+.cȮA?m@$YѨHX xL"ZdzUԍ\ԍ̒˅`qBEJ}6'F *,B]V4iV K$%W«) dҐzra/`YIs]db,fܳW K,i$؞] $avVbtN`g.\$Jk#/p` cv4KHM(T_KFa-[cB Up$R=X2`Fz9S <42Iȁg5`WبVJӯc|1K+$(^+!Ci*dEy!-VaQ`2V 9hm}%jkZd' ½&yZ/f. Μ.w_9RǡpFcX-=ŬKILQLuQ@3#wKxK {{u4tC(`$ѫtX_4Z[=" sўgPbM-CZ p_>`kM[rV-Jߤuz-n֧!zjM:璯\*F9B6v;SGwO$Pgl}N={ ^ ?.;+k)ޢh=9a ̩-9s/˲3-I5lh9<)-- guX^<$ ͳ-Wi u3.TP @rwRMY,VU$~ p!ATdQY˽MlAYyA[W.u5b[WUh%NL@?pwը`hTL7ƈ]9]V䎯MOvfA7C[Fah`@hkӮԄJ[W['yUVw=nI%{`>dƈ/ ٴ4vA[MiQMRGN`ꧺӇR0_v:>a, ozWevfyxd1|{gݮͳ;+>gXFS3|%ֽ_OLOSmR0rsL?۰ka{Eg1ꃝ{\44C^SR^&oK7,#JX BN3H#S-;n^@ yS4fNqN T+yot%(zzVHp\ INg]qcf[:t$>B:ڭr#t&N,YMfYQHa3kCֆ]X lGYA(͂% /<<^"҈&ޝF4=T2*Kj@s&pZXIZš 26JD::>CtDzFT)HsV(^G6(ɺ&K1Rit' <0u_XqŃ#r=utgʗ ޼a6󣐿 7E㼱XNQRścglY !dYCb[<Cl +5x +Ǻ{|E@0r #+xZF=jd$Z al,*K#/2z$5ZaS2L fHh<ᘱL(F2e\2BR1Ih71RT٬BClV[,?J#,g.c򠿶=p\LprD4'hX tg{hCUecF'pĖةDK@f9N I"+xGOmeNPBvũ^W!uBDі=g14#^X 1Nb p78q0}XvX^8p+fr>&7Wd^-}|AϮ?xFs? 9}=/[l#Fn{@FN@&02#9ɨJ0 "4N 3EV\)dޠEyb^{s}ݖ2D9Hz^gI%U ,rTIIR-TFXXB4{QbN]Q?6Ih$`^)A1L"jISV9&LSЌsbBRi#ukG&rF ==ֽܱ2O\SPe,dO 0}|R"o1"A D~_ IwOE؛ ]$w⡿yX=\Gs rW?#./@0$B>+T  nNMtxĭj.{UluQrb 4x OsZ7`F><>Hxq\x+!A/ 4ӫ l۫S|8uk??M(;`K>☬Ϭ8*+ߺ޹ zyAn.~ Y`VU vǺƋ V[[ w g!%B7P:VJ-U0N5ᄐSmU0iOzKkpr' yl )un->X|i\dp5tN7aنp[ӡhӂ \a*+2ùk& 1TiFU\Z*zb"A5V5;ZL[aiPL 1Rym[cĄij.P*doAؾ~#Ҍ\D䃾ZAx<ϐ {<}V﬩!lKm˥rtq_<%~5FM{!,ڀkqCl<LPςw¦ɆBg9YJ ↹3 )fRpLhjc]<{#yl੗TZܫ6֕_:cYXxB3^WWc"ʧhԜVnћmՔ`؄I|`sb? -쪯\MP#y#` ba.v R%ɊEiݾc^t@ߝFgڝ5ݡK{Q^_(dOJ k@]ҿ^rh5_c<c5&I䕂9q""1?Ɗ7PCHPz{En/R퉹jQ#q jN-_j]ZNj8oaXUpvk#Hz{(plVC& Z+a^&q5& Ju]7!HbKP"UH _vjfpjAK<UNmv<墳SS&f+1sdtÈyaGnLgTnGg1^vO-zҭʧh9(x[+VHz!llI03ɭb-(^mK77"OeK+25s@U-5b 9˟]} :FJGoKEZ|"C"OF;U^QW+QÑ\_%Fh/?orP˚ 6PigoI; o4lQqy8xX.c/&~#$!Sѐ)K3(lVѰ͗7.399Kuad,a=C[.BWKDBz2K &\J֖u:br޹-X~_FG\j=oQ.ӯjjrHoܤ_XB jk_ 6i|ֻiʙ_Ha9M/CY*H|XKCGr(fN4A&>㪑;㪑\&gD3$EynRR´fg,5ʼn$ 0R,-55y샢Z>(Yuʐ{x _Qڭi֧ܝGnwyv瑿;+ 1k1ʹg(O#M,Ś3LMjs3 4 ^QZQ4UNt QR- xkɱ'<"WĢ̤Ri 9͹&* ĠuPL3D*yIP&1cy*Q0A"*ar,uRFIfutQ"J(8P0r"E@Yþ)|q̬4)lSC򄰊1Xiͽsn7H1(0,]2 =ce{JSbg RI@{j=7e D;`FJ&r*hM^?cש 3FzH˛R8ΛQuƬ`wS!Pt'7Ӱ S]wcy$^:B:t.ʧ6?~DAZ-5.)0_}uT,dv /k rZ[Uro.?{˕P# j<} S)PtmUVw{~$;{ OS i?_|gWW],/%ZuI]+?Atn9-]? s2.Na:ev:sXթ75vӌp݅TyAd C2EbǤĄk}jZzJ@Kik5fA%/( s=|ڵcհJFX@Վ"`%R5 3NV%l_5AB+;&%a,]"id1l1L̇`6n 43XQ YBiަpk6c6! 
#щ`/STlGI.҅/ p/5F(:I!dstjFLƤQ9dLbRl)AT(Fe8VRȯ.;4߻}3[֪] ƼN93[j^dr; mC~G]I<궏Hs:d+]C΅3W};s-^ 떽.̵ \U8ݱaGm*mc]m*qKRPY/@.yJ5Y/$R#TįcU_ EIDhCbP$:[Z#dh2v jBX:őwMGöh}q/#0 kTYZDD z78 p؋hk-͆T9dJ4dKbH2|!ػs<<3IQ5Ô@ ,pB\rydD){W/pU9!>Hu2$wRfSJ@K%Q!S^@ǜvTXѶYɭV+h+  ^@k})f;b'_*B7 YĄA Cxj0!?ݮ:g3E*wuJbC_aX ؽ/ jo cX(r$٨t=\vep`t}犌J2_Gj zRTyLZʉ 5W.?r@g<υ1s",= Goxc=lշty;kˬ/̮e6_)@>8ᤓ'a>8Jl=үzmR@ IiGWmUgPWbQ #5^)@Q#6 AMښP{Ѡ5)8eFuo;}8>^jW߽F!CEzaۺ*mzi.o-\"yv|sM|xtgwίZO_|< k/+ʑ>Yˇ4ˏ4#KHF4K}iGO_RXVNG_t7ue=+SJMxjrU.yްM0m-L½UzRItlA :; sdh-p.5AMnE1d2(f4 U>z#Ø6:z>mz7Qi _OKHZASSm' 4l1wOw7_3K1SUJ(䲘Wsx}#+GeԂH(,}U!Pr [BB>_=P\p=ree@P43';!rʷ9Gq9Q%_r Tmd+uߥRZN;$n]Xv1dlzJH@K@;b`(}?F0um%\=Wߥ2#mVW $*O/~ѓC2PO{lC<%6XKJ sJ_YP=x Pup&N5G6`³ B|Qfʹi"2ǔ\leAv?ۡޛI̓< (Z1NAUSL\A" cJIsJj45p5[*sKM̛8yeD3AYE= L VK[EX*-_:x?eh F ^ڢ7֊E je"ĩѺgHw'Ѷ~YR.߁pޢ8jRKP}-⌊n T䂥:D 7_Ti4O,@Pcpn2ځNk-X6w$PZxJ| +&nm}[[ JoùB#g@F}%zLk kH `*˘RqygA IC4rTOt'8%]PDcY`dV Т`?~|SAPVuݳ AX^]]DS"8XTTS*M<q.hkuOk9%ڊ:ИIš e|R 5@D5Jp89^j/ iXe5'UܖJS*Ϙ"cg/cc3S QAk&#y{N1HP~ V׎t.17љy'3#6ٌ. H5U4n5E>..<@ѿciwH=ԒT-f2;$|ՔuҙwIiMFC:WCOUvqcC+ψ匮~3gߌbq mgpvۇ[y<1qO9 wG<֝zzy Rck%SQ?OIVCS=!KH6ۓ+]{h4u89z=v9<aՇB FI6{ E1oD~I6c[ø[Q^{UL  $(Ӳ{۹|uEW>{OpɌ o2g@K M?.b#b Q51T1ۓv1?&_WOަS woi2/m p3N}#O,EG[V"&Jϵ'f1v>j0h%5tMQɃkRb[m< .ƕPpK An h(-f_8^87dov^:LEwJ+%@i(!PzRqZ[24aL'G(Ys/ȉ;EXP ;XZt\|WxY;dVBI@udbBJ5.؉-5%L+:lh*M*}nG&t."R-6-'ES\ENUHP)oQ;k$)/RLswI2(qp"4ǟbEBiƴwiG‰Ͱ/Ib|oM1-{QG^24o1P‰ýI~N˫:eN8}&'_kX&qt/}DUyU̜V F"wP?]}Rq$_U|uoSֳzwYhՀKK4_\lCh@y7>_.RH>%*$i-HFi> LRl)v BȒ>|wPNݓ&eX(Rf:fݎԼ?$FKLMԾuiskvĝ{;Z}ڹ~nV}]SW\䰮|xtgw_ib ~|51_)[1>c%yziph(i>F2Wo$TYyef~usb}IV9򺃕ԛ7ýr` y&ZbS?n@'nN3xn8!α[9wwB޸mSq'Ywzx˧`>vl#RsIDefq"h3KyqߟVc7WQrR!^ٚՕ9ۈfœ 9K 9mcdA R+w*#+T66U DM h];3/;9c"?ġIĽY;:Gez )l ]K5௯ȇOk3Y9[}[N ope}=zBqeC/>W*9g)=GO hy?6ӡtSٔ륮7fiWSg02ODbBL[A6}/f־_?}*NȢ|~v3/WsF(y$<utwc LWwH3?ӁC6ԁfЗ3`s6< 9r"4R5VvZ]msۮ3Jr= `RX|QBD;Kq- b?@W|P ;P" oon/Gy> =_" q`p/%bpX/ǿ s`7<1c DOnC̹zk ғ =cC c6}$mP bjupf #03(-z99ΣkkRMN HfJpT4XePWڹZ~8V( pRm_}:2kS>{ZXT*)eB\>'~~sR2 9bz4ĕ+}ĉ}F|;(G{iAdW>tCJO9%D*=wfbΈJrglE]P6guOب*j7ӽo;KPaY98c]P" r49p|BRJW.+KnT>}`\=f$൞]7@H8k:E:ٿR<=fRvNjԦG]"l}p yt3[ (:HNrR%`:0fr!Ck4X@⿨LgI*#RyQJӊ p{gbbfCce8eͬ2 kfi˔Р1`U!%qq7|ga X:$A~U 0+H G>U@eI[j2MO0\7 *hzcM9 後R#$ZW㱗*h9)[]?QXjS_̦ KgQÖ7?ݜ^TMz(Jd=嶹z.+OB~\$/WJat3>$}[1fJ !0!iʒ,A,.zZꬫK}V!XH C?O5JD()j1MReH$뱽͇v. !YYU:a RBP0JF3*% ]RRJЈ[*ha,ɏ&?'B`գDfVCW>^Yw!0@Z4xg+ QqS.5Tp`6gX+BYm`DrV +%ul$tkxK/5Txumx/GWp06}9_4tFDO룾ӻSlO8[ $N4;Xr_Hh7cu+"^c0/15gilK3 }^~q> { siC6UndwѦ.z-bqPBvtKg_$ZвʛlzwfͩzxpwX㿎N(#c4x;Y}OA8$(ѨPʽfpY"1oc*Jsv=t|>Q'4lM<ɦd.B1Ky96I_|GM{Vk{ GD$H'bh N'AJgvY3psj|H }7s,J0H0!wUa RL+R;/͒I$#DdYEДRafqPD[:-K,q,œY%d!t|%lR t5n3qJ1FIdBx#?x$$fbb" Dǃf vHGZq0:4,33^X1$Uȍy@LK 3^09l?ԅ!o(0S@EbbÂwW#cêAԳIߍpj:&ZvlD5e wZaLȢ尔nrÒ2G`΢Sΐ7KOXsUL#D']Taz\YK ڸ!!:~hߡ"W-֝38MT(쬴Ӝ;4Opv#4t=)aU~K yiwPPm 8W׍; Ω ~&*RIY(9?3xxVL✘wn,/<9jy8 ~ш`9QJSNgOC}x p|U3>kD5/F.fp]pBi{\~Q%(""{rETk_Qm_4},y[kE1b"YI)`)`oFw$Ȩ):`y-5"D v@Qaᇆd\x|߾|VY%(6SH"B,ͰU0`(sdb|GB$_Jp&i"IEB4L"KN^#+O%0R/ G]%p@Bid _MݜV4]@ܭ *K!6WgPbhh%S,D_D ‰9v`pݚT9*!8C6ʌReHXLca"V\d2Tzߦ]"X{Oܟaww:yWPXԿ RںS<:4OFخij\{}ʅb{Qsm>6)4 9#Dz Z4 U.rQx[wTGr(:梹FGwl+ T-M+ǣ1/ۃDzHΗGhxuaYTX*eAw@AN}VWETSnPBaU>ՒGOd1?' 
0yiA6 6\_pp&Jb~o/?S-׭[Vkç'Ec~(_>, ǀ+r;s#xNT$Gٻ\}AcÞ3f]?XKZ[XՐ|"#Sp[_MHAQMm <-ﵮb[|"#Sv;ׅ;b盘 lZ:/6&D&8⺓ DIXG7C,#Pts@(oWw4N?cmu'?K3Ya< tRUk޹ZkMf~x $Y,wn#nh(&d33bF9Q#ЍΌyyQ(vbtH-DO+0=h|g>-rrvܗbOGr٭ "#9ׅ2:gJRZh֟440lfN.~d gLR/(ɶ4u+ٚ~ٝfd9;wMeM0=+[|̀fw.!475ҊHټ2P9BDӺ *ꯊ/p?,ACQ5I+AN_ 2|}D>Ue ˮ[oJɄ])%8욌VŒu:&u& pSe5y9hJ MV@<'Z\HS/Es .FQpJK438MRe&)%LkƱlHLjpfHXP̛,QdeκX$.TΑͰx9_wE} rlxmѐ.Ju\ _X ( Q-BYG9/}-zO'W6>@ޤy5`= ,+A;>\o|]_7mȦIQ>?yhM< RsI y`~9uȰ?dbj2O?^_S<]?LMgwfdޭSh Y~/'rmе/"Ϥ >1C?x`Q\Bo>KL{o^E]_s LjPbٝ[&~1j9$Zv!8R3R J|e۷k*޹j\ +GƧ~quyш`9B#啒_i@`iknǓMaf>X@';tU^J=6W]&uGY"etؓ5!/(&5?:4tң@;[^lb@fV`D.Q^ N_h*QG(]Fdwl+] G􋅵j-tɴk-Tᖢ\[%RΕetQ8bV5ph|Ny˭rA#uZTVrABK]Z<(ioB}ܶ˴D-0(a!1&V*)M}ЂH$,NYJՖBu6aRJ+TCL??bQqh#*z^WHHjP, 74Rm%Ds !LQ.R?{춍/3~qVj7;xyIE%d*}FQ ẼcDFht#| ޺NxUN;s'Ͷ8yO b7VsIABBl ɑJ%lR Q< HF(@hU.:y+rD^;X NI(a6fQ2!) n9lcG%"RD< AYI dJ0 sqP$+ȅ1f(Li!m*06?`#4[˥悜нH~\2IfyI&@1eԸB"~E5_ \8|BvEFHBp){7&/.GvŽ|Kl=PWBpئ Xm)_+?` 2t52qAԲK'э_K7!FB$Hɮ?k([aQفo }܂:^ \v H kl+f94zAS /zJqTְ­"ƆdAhW)$En18x9eW^,nR0H2{w +n\]Xc]gԐLB3F`J "s$B"(UfLQeQlz߹]ٝs P6b3YS./L 1ݧ{ýWpwͭ&-\ߕ~VZ˵)1;3fiBKy676k |kn Y43 ^ .{Wm5AZ_x^<ҝ$E=_̰Nt:'AvN5{ӄE %y) 9LRy"V\cŸ&7~ٝ /jjSs, ݚ{ ` BYZ,q,gPNRI(E@h:\Jew [Or.I 4BJX9`cCSU 0ުoU(A4zb'8])q.W;.b"Ddb@@pk9tAUPXu_5l<3hv!o̬ܕmpD{n&ׯ3|qߛaOҏܖ5(X~v%i`M`]#1[ s^I2挱>Ҝhq7lDQů(Av\[iСW9x\ܜ܅Dt(tcf#8`S1EjTBt6n A2^qu]SZj AZQc5H ǦOx 4QGŻ?  _o`B &'nY7OP3DgED`Yh tF w>-U\z]-UC{I}~}|?MoI}j#,f~2=eN1HT&M=zV8urYIvjJ]Xr0D9Pjz)!# zesp\ w|;88EL@T8brq.YLi )$U $S,/HH14 0Y Y,I>cl6w,iiVP-lұ099J3XLE[ a+jon2Hk޷sԦ!]j%P#kNx 05}S(&ޑ)4Nlmx+ǁ|e)D`,@`kwHꃯ4}Db,ÃXn=5iymv.zFEPIqL1<> '+>L`2{/}XdYwt^voǦg碖sz67aq\n. : U~-Hb)F+軃"At?qQ|<]AV&"+ WIY-;S$蚲:N+hk84fߒFlQ2} GĔb NLy?r=Ó"GMG#كK{st3k :v!{~5bNP9 'Dܜ.9L$()GoW;d3fR#e 1+RBL̜(qM\*th/٧x _HI.PO\2F̘H&9J4SRY.!IrD1rHU!aN H#\i8Ƴ)d,6]i eM9bS\:{so6{Gg6Pҗvi#'>D.5vƏuqK2L@,&yԈr,_޻.Ӫ_Vnݚ"wx',gK`?|44I25 ݔ1)~ddFY^Ӈw_g%+'?d9Mz5_2ӃSyUn >?dtw22_0Z%cАGtVqzdݏ,dC لuO>!BJ^6] o|z6B~2AJPЎ%/Wa"C4ܣ UA)N81x#^D^0ϕ`L1e`iJBBlbPX@9J䚦Pxq)Uv'}A=A|\ZeSca{'R}9=/rh춉p%L5E˰ȸqNrNプrx?~Q_^ F* R9 )*_V>a䠲׳tWOk>@S4YV^k@ț?f/wƋ_m*(J:S IchP aW'a/eЫL 6R?.~SҠi^pp8Wӊdy'º+yEσ2ae9Lo}q^(]UA9*g\?D:"]Kz*PTt}$ 򝇙J)QMIi3h,l'z0)[ҽ%;?>՚4vftEX:з߬]?cOw~\=-?ҟfՀSA~lOkfd1g\*y^mgmJvI|gf^k@򅛨MUgo$hcQ^ ȯ}b߾}tqW5L5H$E"`sT+._=~7J?x:< ht)1QԨQ݂:ewS-(P/s5qR =ݗ>;iy꜏1݀"qm"DǕ9Rݚ 싒Ɲ 3Ῠ F>ˡ9fhL_DdsZK50!Z41Qk1CXh)1ڌؾe[)".1(PZ /:`/Imxܯc [9.z<=d$,`;}W?>5S ٣M%90cO!F~^L+o1@ PC6}%}n?H?}i\t3Ƃ0q XǘUNa0-scMwi Y4eBl`S` )o Zq:</:'\@_Uy ~PV@/杖̰&>zUR/TALssd #|6' Z*|kU1r PC!SReB{4g*` &ySXRe)ķMY!Vtf/{]/)ݭۺuI QdeŻ4PYGcO5Y#gBGmH&CIFMt l8-\TRb1`X DLF̘H&"H3%ʜ[]>Rn@X ZAms _VEqs &8/:X8Hx9gz-W!ח zX20D%En>ਰhxEN6޶ˡlݶ GlC he'/^~6RX-> ~S s7/rP<@_&xN7foMq*6yk<|SŮ1wp[㯦oꂜc:\H֍No{~JϦRY;µYVչmx=En% Uq?Fsj =(eZ1[u\ixz_"i8dGh$JÜkpȂJBdzCϲMw w=/mi6\=3Z$@D_'|O'ظw6JqN& ȶ`&Aϩ:JBV1}Gf]o7f{?sI5]bQ rݦmbJͮq@wnGLLg:̂}XJbĺ84,7anpNb,dCV *yZBȮ1H=|rC+5d=aPiWG~WFj-r8ǬdT:`Bx2!tξZD!Eg@pGw@b-0X 8kc5$W"G( csR(pԥa/oke u D!Q &9I1_`V\3ZV^]c6k~wԇnkDTï>/ڊIٴ_ zPW_A5G_,F;B1ޠk=.8% mYS [<N! 
a\Lζ-l8(Ry˄(r(v9̌Z eBo*?Vүj0˜Yrh[9škvmmWhܮV7|f$ߐ'V[C[o&oF w-D ʻAwݱs]ĸؤV@owߛ3\YCR2h5W/wr S4|_wsO$͈<ɴUe+m'Qb(g(J@JB[ba'dǸjG֗ VJ}8<" Rorn 4Q#jzqpZ:|k2^PR nh3pXDjqMgr*Roa:4o7FQ )F snjHM%siszsz30t-Uwn0z̲H{'k=O{a2z-;zx@0n"{dr3?ĮaZ5}dpq@wnGOgg΀Nu{\vq"F\ğW kyY2E3c>3Ygs뙛RL,9NC'76낺ݸ[] Ngn' @[nMXȟD+۔""_m"dT[D$': 7XE9'W7b}jN1(6ge ʘq1IthzxKCl}-e!߲QbbX;S cVRmHAZqrgS.S.,8wX&:^$ClÅ2,t}WխjЉKɔMH>Sbs`WEZ7 js>(.gNW e!kB=$XX^0^@Жؘ7kB{-X?j!'!O[׋mWƚExF'kR+/r=%/ _`/e1㤃h8d̨Xxy6~C5l6{ y_|V>y3}Aofǽ+kl?`iYBܗ k!s /(Ͳo{y ?AvH55 2\Ȅ!!2;҉-:w^Oy4sӅjBRYŹx(4C"Hb)>#mGJ*|/u^_ ֥NbN{䝢BQ2,AlČ  $7bKqc_njQu9An> JKMI_8Bu,}%'Nf븍EZ=bBU8 Aj:xE EbSQ{B+6 N[݇%ą#yG;^0>bbKhlFZtWB]rRw:lgR)i>S-gV4By Ly mN-n/K͋NcRAU= WWZ=aBmǵݯo;8.lGNn9%ᐬ>Ķ eS3mp4BV7[yt![n(:|*OF#TJJ^)w:n Džaq 0-!@*#I`c#JSehWJAgȧZA< Nf,թh)-Mb CU:!un#d{-4"Rj,a(3' 2^`"RͽC %wOSEֹД*hTKBdӡ"$M63qH"HN9ȱҶseb:g ooZuLAs?g| Lwozaͫ]č#?[ ^EY3."y0Ks-y`m  ~%0/j-ʝᴟ|2~JƓgMs ޡȵXN6WJ;N0{?/1ן+˟e5+ pIB<1FwmqHW 2b[A~`؝dliZcyrN7XUnYyQZ.4mS8d0x9'? sP>7=5\8eZNlWyoaatwZGu63Pf|!P<$~[i+(y=Gs TC͜ %Ρ(^rӎ?/ @Iq^v ?{{/af`/͐ƬCt /%gZف$YP~':Ijɘ2J cޫl|("oMد.M! пc" 7hR1jvљ guAt﫛lE~zV޿~;_J c0URHfcTTH!}~s{ ~B2$%VbB-"'Lk)^YAIc`iYEGP7<|fڀSg`VAjjQu.j1RcV4u{Rscw^JƒLJ/-LG~bb%R;Y:װ%A2KGpkuozRvN HnY[ ^fKf4>9. u1 ) }y(w2;r:YEvSQ8n' 3BNଧa>CxNw7{^J#GNx>aOue~5\MVn6,(Կ|JeyV SJO&z fƖ3)?P7OOq3Q'l!vɲӨ}^NBaAF^N9D{~gr,2fjtR{5ꏵW419_ݧ/{Wz,oԴji-k*2ppY)mAE*6zTu`kLOPwnf\|tJhќ|5{cUf+Ugn2iI}v+Kgrjƛ"LW6߄z91U*Wm9cM 5*qxPhB%mN| ۱λげϟ`:Nf P|;0[H+H:쒷 fB+ e—E/,DtގB#ld՚K<5+lg@rOH[ "12!UjM=,%4gVFCpI)X|z 0H0 SP~~oPU$_J&ӯΣ6Ek2U!aҠ93Iz脭R\ )4Ul{ZHD6[e>:2]>bj,ven|89lj(Pp .1sH3z`.rzuw hV|dne;cFH/d],+`,w_wlbB ցR+A ~SQVc " ?,DMqkmPh,*do ӂL'%UkN}j 쩈v;ǫ15")FFQaC.Z1M/0suƃ٥0B0q&Y*\|qﱗ*_vb)K[T}Z ^㽊s*P 7xt&\KȒgKchcJXqj,V[`L"dləbu,X'y" N n}jE@ " |γXr:Hl>ie[Z8ckqBD8`ՂY-ؒ$M'Z~]}orbpai 7k2h"G5C#&_ZuJO?}tmZ뛴jܤwoM5 ~ϫWoz dC]]wo.VkJ@-_U)=סK2 x{Ni/\~ݼܮ54'=8;<岧rK'RA4LC]٣oC"bX@KUN ?Rn"[.w]ףZaۤ}Fh[T lbITbkHKm)w۬93wQڬ  GiC,JDeZVZU"8 0^Z-^AKn{ov/U.zv˰ )UQJNDV 8^y]V);?} vƊH1ՈՐYewU*aDbAI'i+)Tm%>%)5UVL21zѧXm7c:D5-XaNC }`R߫'N4tXWL[k.Z?73Pb=2$\mҕèQbr 4X۔tGy:Bw[׫q8Ba`I+n` @ Ŋ"WU%?|E=r͡MREM[H A.wt43PBp4K\}9XQa9, e fRȱ.̸(Jb{-hGumy ;,G_玼^ٞ_]O>D|as?t{?b"WOC??WϽ}﻽zB8Sh u߭\>9&]*m@ w}v=.}EU=zp­ = vLͳFrc."&ZD:< Xrv+$C%[p} ?o/;KJBnqvnc!`˝kn?\d?"!sKYwFuIY9c*pe{R[X}|Fφzj b=\Ni21Ewb9StVnijE3Ef D2S1BO9St:d$VgM*OyPt4(&s#+`~ҧusmtD׶.M$L(TZ'|hdmV! I}wW?U^]j>7ș2Et &'ERktK0DQi5jѢ1V+ݷxҭ8Q [קqW݃+*U݈͂~ _*j?wMP]DS w>)e'khDޔد@ 7 -3>ڎ hfk_\k΁[i7Hmp*'DZa(/ǎ5ǟ+D^R7Ueeu4B9=nXrb 3sdKp=rb;Sݢ׃7/ě/0RkiFFlDQ@&mg\fHh%{fc&|j6[TDhi5 } {鳄{k#á-N2*o>~T=½زux|=~VգQT.߲rRBH9 rʪjr֚\UAm}@\ Bd$u?B 3-?By+FBy[9By7nffB)cW9@"[[p3rWgjgEֶwDYkA=j haHV}z$YIhaV[XF@Dۖ] ^]Ts F٧<UJԈޥ+Wvf-wi ; jƆEL5]=)ek~ hRNlB3VNѢs@W f N@TPLi*?sϳgm<g^; X?p ,J^G\s-_g9כLAlpղ9\oass.U j.':qPJ3n3“NSn;JЉYXr1E@hnEa۔D.*Ԗ eϹAH4 5KRQT&GmbD Y LD8*)㓵YDsՇ]'j'6QbDtmDK QU&;řEYLL@-YD6ƪ2rldfj峍AS$;jxޑ$c^%,fy` i sٍTgZ/smϿz>AleH|v1E @RۻO'!3c)lBU?WfI6[ oIi/W{tAÂ'/Zq~?o[B . 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000004745234115140072310017700 0ustar rootroot
Feb 02 08:56:26 crc systemd[1]: Starting Kubernetes Kubelet...
Feb 02 08:56:26 crc restorecon[4694]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c2,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:26 crc restorecon[4694]: 
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 08:56:26 crc restorecon[4694]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 02 08:56:26 crc restorecon[4694]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:26 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 
02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 
02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 08:56:27 crc restorecon[4694]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 08:56:27 crc restorecon[4694]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Feb 02 08:56:28 crc kubenswrapper[4747]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.043062 4747 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049710 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049744 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049755 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049764 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049773 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049782 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049792 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049799 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049807 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049816 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049824 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049832 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049840 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049848 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049855 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049863 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049871 4747 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049879 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049886 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049894 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049902 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049910 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049927 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049963 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049974 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049984 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.049994 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050002 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050010 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050018 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050029 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050040 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050049 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050058 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050066 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050076 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050084 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050093 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050100 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050108 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050116 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050124 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050132 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050140 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050148 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050156 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050163 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050172 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050180 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050187 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050195 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050202 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050213 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050223 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050231 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050240 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050248 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050255 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050263 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050271 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050279 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050286 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050293 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050301 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050308 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050316 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050323 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050335 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050344 4747 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050353 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.050360 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051478 4747 flags.go:64] FLAG: --address="0.0.0.0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051501 4747 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051519 4747 flags.go:64] FLAG: --anonymous-auth="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051531 4747 flags.go:64] FLAG: --application-metrics-count-limit="100"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051542 4747 flags.go:64] FLAG: --authentication-token-webhook="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051550 4747 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051562 4747 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051572 4747 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051582 4747 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051591 4747 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051600 4747 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051610 4747 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051618 4747 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051628 4747 flags.go:64] FLAG: --cgroup-root=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051637 4747 flags.go:64] FLAG: --cgroups-per-qos="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051646 4747 flags.go:64] FLAG: --client-ca-file=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051655 4747 flags.go:64] FLAG: --cloud-config=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051663 4747 flags.go:64] FLAG: --cloud-provider=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051672 4747 flags.go:64] FLAG: --cluster-dns="[]"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051683 4747 flags.go:64] FLAG: --cluster-domain=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051694 4747 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051706 4747 flags.go:64] FLAG: --config-dir=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051727 4747 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051747 4747 flags.go:64] FLAG: --container-log-max-files="5"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051763 4747 flags.go:64] FLAG: --container-log-max-size="10Mi"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051776 4747 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051788 4747 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051799 4747 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051812 4747 flags.go:64] FLAG: --contention-profiling="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051823 4747 flags.go:64] FLAG: --cpu-cfs-quota="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051835 4747 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051849 4747 flags.go:64] FLAG: --cpu-manager-policy="none"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051860 4747 flags.go:64] FLAG: --cpu-manager-policy-options=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051874 4747 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051886 4747 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051897 4747 flags.go:64] FLAG: --enable-debugging-handlers="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051909 4747 flags.go:64] FLAG: --enable-load-reader="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051975 4747 flags.go:64] FLAG: --enable-server="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.051992 4747 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052024 4747 flags.go:64] FLAG: --event-burst="100"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052037 4747 flags.go:64] FLAG: --event-qps="50"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052047 4747 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052058 4747 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052069 4747 flags.go:64] FLAG: --eviction-hard=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052082 4747 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052092 4747 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052101 4747 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052110 4747 flags.go:64] FLAG: --eviction-soft=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052119 4747 flags.go:64] FLAG: --eviction-soft-grace-period=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052129 4747 flags.go:64] FLAG: --exit-on-lock-contention="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052138 4747 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052147 4747 flags.go:64] FLAG: --experimental-mounter-path=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052156 4747 flags.go:64] FLAG: --fail-cgroupv1="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052165 4747 flags.go:64] FLAG: --fail-swap-on="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052174 4747 flags.go:64] FLAG: --feature-gates=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052185 4747 flags.go:64] FLAG: --file-check-frequency="20s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052194 4747 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052203 4747 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052212 4747 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052221 4747 flags.go:64] FLAG: --healthz-port="10248"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052231 4747 flags.go:64] FLAG: --help="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052240 4747 flags.go:64] FLAG: --hostname-override=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052249 4747 flags.go:64] FLAG: --housekeeping-interval="10s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052258 4747 flags.go:64] FLAG: --http-check-frequency="20s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052267 4747 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052277 4747 flags.go:64] FLAG: --image-credential-provider-config=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052286 4747 flags.go:64] FLAG: --image-gc-high-threshold="85"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052294 4747 flags.go:64] FLAG: --image-gc-low-threshold="80"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052303 4747 flags.go:64] FLAG: --image-service-endpoint=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052311 4747 flags.go:64] FLAG: --kernel-memcg-notification="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052320 4747 flags.go:64] FLAG: --kube-api-burst="100"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052329 4747 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052338 4747 flags.go:64] FLAG: --kube-api-qps="50"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052360 4747 flags.go:64] FLAG: --kube-reserved=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052369 4747 flags.go:64] FLAG: --kube-reserved-cgroup=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052378 4747 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052387 4747 flags.go:64] FLAG: --kubelet-cgroups=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052396 4747 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052404 4747 flags.go:64] FLAG: --lock-file=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052412 4747 flags.go:64] FLAG: --log-cadvisor-usage="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052421 4747 flags.go:64] FLAG: --log-flush-frequency="5s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052430 4747 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052443 4747 flags.go:64] FLAG: --log-json-split-stream="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052452 4747 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052460 4747 flags.go:64] FLAG: --log-text-split-stream="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052469 4747 flags.go:64] FLAG: --logging-format="text"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052478 4747 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052487 4747 flags.go:64] FLAG: --make-iptables-util-chains="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052496 4747 flags.go:64] FLAG: --manifest-url=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052504 4747 flags.go:64] FLAG: --manifest-url-header=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052516 4747 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052525 4747 flags.go:64] FLAG: --max-open-files="1000000"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052537 4747 flags.go:64] FLAG: --max-pods="110"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052546 4747 flags.go:64] FLAG: --maximum-dead-containers="-1"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052555 4747 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052563 4747 flags.go:64] FLAG: --memory-manager-policy="None"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052572 4747 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052582 4747 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052591 4747 flags.go:64] FLAG: --node-ip="192.168.126.11"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052599 4747 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052621 4747 flags.go:64] FLAG: --node-status-max-images="50"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052630 4747 flags.go:64] FLAG: --node-status-update-frequency="10s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052639 4747 flags.go:64] FLAG: --oom-score-adj="-999"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052648 4747 flags.go:64] FLAG: --pod-cidr=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052656 4747 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052677 4747 flags.go:64] FLAG: --pod-manifest-path=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052687 4747 flags.go:64] FLAG: --pod-max-pids="-1"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052698 4747 flags.go:64] FLAG: --pods-per-core="0"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052707 4747 flags.go:64] FLAG: --port="10250"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052718 4747 flags.go:64] FLAG: --protect-kernel-defaults="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052726 4747 flags.go:64] FLAG: --provider-id=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052735 4747 flags.go:64] FLAG: --qos-reserved=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052744 4747 flags.go:64] FLAG: --read-only-port="10255"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052753 4747 flags.go:64] FLAG: --register-node="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052762 4747 flags.go:64] FLAG: --register-schedulable="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052771 4747 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052785 4747 flags.go:64] FLAG: --registry-burst="10"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052795 4747 flags.go:64] FLAG: --registry-qps="5"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052803 4747 flags.go:64] FLAG: --reserved-cpus=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052812 4747 flags.go:64] FLAG: --reserved-memory=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052822 4747 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052831 4747 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052841 4747 flags.go:64] FLAG: --rotate-certificates="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052850 4747 flags.go:64] FLAG: --rotate-server-certificates="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052860 4747 flags.go:64] FLAG: --runonce="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052868 4747 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052877 4747 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052886 4747 flags.go:64] FLAG: --seccomp-default="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052895 4747 flags.go:64] FLAG: --serialize-image-pulls="true"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052904 4747 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.052913 4747 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053137 4747 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053147 4747 flags.go:64] FLAG: --storage-driver-password="root"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053156 4747 flags.go:64] FLAG: --storage-driver-secure="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053165 4747 flags.go:64] FLAG: --storage-driver-table="stats"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053173 4747 flags.go:64] FLAG: --storage-driver-user="root"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053182 4747 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053191 4747 flags.go:64] FLAG: --sync-frequency="1m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053201 4747 flags.go:64] FLAG: --system-cgroups=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053209 4747 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053224 4747 flags.go:64] FLAG: --system-reserved-cgroup=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053232 4747 flags.go:64] FLAG: --tls-cert-file=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053241 4747 flags.go:64] FLAG: --tls-cipher-suites="[]"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053258 4747 flags.go:64] FLAG: --tls-min-version=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053267 4747 flags.go:64] FLAG: --tls-private-key-file=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053277 4747 flags.go:64] FLAG: --topology-manager-policy="none"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053286 4747 flags.go:64] FLAG: --topology-manager-policy-options=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053374 4747 flags.go:64] FLAG: --topology-manager-scope="container"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053384 4747 flags.go:64] FLAG: --v="2"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053396 4747 flags.go:64] FLAG: --version="false"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053407 4747 flags.go:64] FLAG: --vmodule=""
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053417 4747 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.053427 4747 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053670 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053696 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053707 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053717 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053727 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053742 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053755 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053768 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053781 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053795 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053805 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053817 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053828 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053839 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053851 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053861 4747 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053871 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053882 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053891 4747 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053901 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053910 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053920 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053930 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053972 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053980 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.053988 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054001 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054009 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054020 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054028 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054036 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054043 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054052 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054061 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054069 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054077 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054084 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054092 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054100 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054108 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054115 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054123 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054130 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054141 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054151 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054159 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054167 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054175 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054183 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054191 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054199 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054206 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054214 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054222 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054230 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054237 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054245 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054253 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054264 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054271 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054279 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054308 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054317 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054324 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054339 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054350 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054358 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054365 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054373 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054381 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.054388 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.054412 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.067133 4747 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.067174 4747 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067318 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067338 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067349 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067360 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067370 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067381 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067391 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067404 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067414 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067423 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067432 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067443 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067453 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067462 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067472 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067483 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067497 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067515 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067529 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067542 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067552 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067564 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067575 4747 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067587 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067597 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067607 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067619 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067630 4747 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067641 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067652 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067663 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067674 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067684 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067698 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067711 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067722 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067735 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067748 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067759 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067770 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067780 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067792 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067803 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067814 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067823 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067834 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067844 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067855 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067864 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067875 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067884 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067894 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067904 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067914 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067924 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.067994 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068007 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068016 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068026 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068036 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068046 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068057 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068066 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068074 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068082 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068090 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068101 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068111 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068122 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068131 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068147 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.068164 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068466 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068493 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068506 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068518 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068529 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068540 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068552 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068564 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068573 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068585 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068595 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068607 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068617 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068631 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068647 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068659 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068670 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068680 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068690 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068700 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068710 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068720 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068730 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068740 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068749 4747 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068760 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068770 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068783 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068795 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068805 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068814 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068825 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068835 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068845 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068857 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068867 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068877 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068887 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068897 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068907 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068917 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068927 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068975 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068986 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.068997 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069008 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069018 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069028 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069039 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069049 4747 feature_gate.go:330] unrecognized feature gate: Example Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069059 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069068 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069078 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069089 4747 feature_gate.go:330] unrecognized feature gate: 
VSphereMultiNetworks Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069102 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069115 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069126 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069137 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069149 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069162 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069175 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069187 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069199 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069210 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069221 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069231 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069241 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069251 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069261 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069271 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.069283 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.069298 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.069574 4747 server.go:940] "Client rotation is on, will bootstrap in background" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.076788 4747 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.076979 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
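The two warning sweeps above are the kubelet parsing its feature-gate configuration twice: every key it does not recognize is logged at feature_gate.go:330 and dropped, explicitly-set GA gates warn at :353, deprecated ones at :351, and the surviving map is printed at :386. A minimal sketch of that parse-warn-skip behavior, assuming a simplified stand-in for the real component-base implementation (the known set and the warning format here are illustrative, not the actual kubelet API):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// known lists the gates this (hypothetical) build recognizes.
var known = map[string]bool{
	"KMSv1":                     true,
	"ValidatingAdmissionPolicy": true,
	"NodeSwap":                  true,
}

// parseGates mirrors the observed behavior: unrecognized or unparsable
// entries are warned about and skipped; the rest form the final map
// that the log prints at feature_gate.go:386.
func parseGates(spec string) map[string]bool {
	gates := map[string]bool{}
	for _, kv := range strings.Split(spec, ",") {
		parts := strings.SplitN(strings.TrimSpace(kv), "=", 2)
		if len(parts) != 2 {
			continue
		}
		val, err := strconv.ParseBool(parts[1])
		if err != nil || !known[parts[0]] {
			fmt.Printf("W feature_gate: unrecognized feature gate: %s\n", parts[0])
			continue
		}
		gates[parts[0]] = val
	}
	return gates
}

func main() {
	fmt.Println(parseGates("KMSv1=true,GatewayAPI=true,NodeSwap=false"))
}

The names warned about above (GatewayAPI, NewOLM, and so on) are OpenShift cluster-level gates that the embedded Kubernetes gate registry simply does not know, which is why the list is long but harmless.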
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.079018 4747 server.go:997] "Starting client certificate rotation"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.079051 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.082558 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-07 03:22:16.226166927 +0000 UTC
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.082748 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.111621 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.114912 4747 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.118495 4747 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.135644 4747 log.go:25] "Validated CRI v1 runtime API"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.189098 4747 log.go:25] "Validated CRI v1 image API"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.191526 4747 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.199121 4747 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-02-08-51-34-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.199173 4747 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.234734 4747 manager.go:217] Machine: {Timestamp:2026-02-02 08:56:28.232042103 +0000 UTC m=+0.776380626 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d70abc6b-ab9b-46ee-8b6c-2747d8bea427 BootID:4171cdc0-0933-45c6-9d27-161671337117 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e1:2f:d4 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e1:2f:d4 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:1f:c6:8c Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a2:5a:be Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ec:ea:71 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:c1:fd:e6 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ae:49:87:0a:ba:65 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:52:87:65:89:5b:c9 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.235268 4747 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.235483 4747 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.237431 4747 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.237670 4747 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.237721 4747 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.238643 4747 topology_manager.go:138] "Creating topology manager with none policy"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.238664 4747 container_manager_linux.go:303] "Creating device plugin manager"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.239073 4747 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.239101 4747 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.239432 4747 state_mem.go:36] "Initialized new in-memory state store"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.239545 4747 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.244169 4747 kubelet.go:418] "Attempting to sync node with API server"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.244196 4747 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.244248 4747 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.244266 4747 kubelet.go:324] "Adding apiserver pod source"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.244284 4747 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.253202 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.253473 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.253657 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.253665 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.257313 4747 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.258439 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
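The nodeConfig dump above carries the numbers the kubelet uses to size node allocatable: SystemReserved (cpu 200m, memory 350Mi, ephemeral-storage 350Mi), a null KubeReserved, and a 100Mi memory.available hard eviction threshold against the 33654124544-byte MemoryCapacity reported in the Machine line. A worked sketch of that arithmetic, assuming the standard allocatable formula (capacity minus reserved minus hard eviction) with KubeReserved treated as zero because it is null here:

package main

import "fmt"

func main() {
	const Mi = 1024 * 1024
	capacity := int64(33654124544)    // MemoryCapacity from the Machine log line
	systemReserved := int64(350 * Mi) // SystemReserved memory "350Mi"
	evictionHard := int64(100 * Mi)   // HardEvictionThreshold for memory.available, "100Mi"

	// allocatable = capacity - system-reserved - kube-reserved(0) - eviction hard
	allocatable := capacity - systemReserved - evictionHard
	fmt.Printf("allocatable memory: %d bytes (~%.2f GiB)\n",
		allocatable, float64(allocatable)/(1024*1024*1024))
}

On this machine that works out to roughly 30.9 GiB schedulable memory out of the ~31.3 GiB physical capacity.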
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.260620 4747 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262199 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262260 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262285 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262302 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262328 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262344 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262362 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262387 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262407 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262424 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262471 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.262488 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.263658 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.264489 4747 server.go:1280] "Started kubelet"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.265189 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.266023 4747 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.266028 4747 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Feb 02 08:56:28 crc systemd[1]: Started Kubernetes Kubelet.
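Every failure in this window (the CSR post, the Node/Service reflector lists, the CSINode lookup above) is the same symptom: api-int.crc.testing resolves to 38.102.83.190, and nothing is accepting TCP on 6443 yet because the static-pod kube-apiserver has not come up; the kubelet keeps starting regardless and retries. A quick standalone probe for that condition, assuming an ad-hoc diagnostic rather than anything the kubelet itself runs:

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Same endpoint the kubelet is dialing in the errors above.
	conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", 3*time.Second)
	if err != nil {
		// Expected output while kube-apiserver is still starting:
		// "connect: connection refused"
		fmt.Println("apiserver endpoint unreachable:", err)
		return
	}
	conn.Close()
	fmt.Println("apiserver endpoint is accepting TCP connections")
}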
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.267414 4747 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.268390 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.268489 4747 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.268784 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.268864 4747 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.268974 4747 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.269131 4747 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.268889 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 11:11:29.913612164 +0000 UTC Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.269682 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.269782 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.269820 4747 factory.go:153] Registering CRI-O factory Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.270801 4747 factory.go:221] Registration of the crio container factory successfully Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.270973 4747 server.go:460] "Adding debug handlers to kubelet server" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.271141 4747 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.271170 4747 factory.go:55] Registering systemd factory Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.271501 4747 factory.go:221] Registration of the systemd container factory successfully Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.271582 4747 factory.go:103] Registering Raw factory Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.271603 4747 manager.go:1196] Started watching for new ooms in manager Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.272788 4747 manager.go:319] Starting recovery of all containers Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.278222 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="200ms" Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.281385 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890622a81eb48e6 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 08:56:28.264433894 +0000 UTC m=+0.808772367,LastTimestamp:2026-02-02 08:56:28.264433894 +0000 UTC m=+0.808772367,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287689 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287785 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287804 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287820 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287838 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287853 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287868 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287886 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.287956 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.288014 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.288031 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.288051 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.288073 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291371 4747 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291499 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291542 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291565 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291588 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291611 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291635 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291657 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291680 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291701 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291783 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291865 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291911 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.291999 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292033 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292068 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292090 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292112 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292134 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292155 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292176 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292200 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292266 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292290 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292380 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292401 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292424 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292496 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292538 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292574 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292602 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292622 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292649 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292674 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292695 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292721 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292750 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292773 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292794 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292818 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292853 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292876 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292899 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292923 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.292973 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293002 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293031 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293100 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293134 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293162 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293181 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293200 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293223 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293244 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293305 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293328 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293351 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293375 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293398 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293421 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293490 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293513 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293535 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293557 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293582 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293602 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293626 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293651 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293674 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293693 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293714 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293734 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293753 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293773 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293795 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293816 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293835 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293854 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293873 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293891 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293914 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293933 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.293983 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294003 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294024 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294043 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294064 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294089 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294115 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294140 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294163 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294185 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294217 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294240 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294262 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294282 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294313 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294336 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294360 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294382 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294403 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294424 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294442 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294464 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294483 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294502 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294522 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294544 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294565 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294590 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294619 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294646 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294675 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294703 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294736 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294784 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294819 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294848 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294875 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294903 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294926 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.294998 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295021 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295042 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295063 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295093 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295140 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295168 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295198 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295226 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295256 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295280 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295300 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295336 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295362 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295392 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295432 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295461 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295544 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295594 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295620 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295644 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295664 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295686 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295709 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295732 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295754 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.295792 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296183 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296218 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296241 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296263 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296285 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296312 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296336 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296358 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296379 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296402 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296423 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296443 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296467 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296488 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296511 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296532 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296551 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296571 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296591 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296612 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296631 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296652 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296673 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296694 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296716 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296749 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296771 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296793 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296818 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296837 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296858 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296879 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296905 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.296966 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297000 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297028 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297054 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297173 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297818 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297889 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297924 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.297991 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298023 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298051 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298086 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298113 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298141 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298167 4747 reconstruct.go:97] "Volume reconstruction finished" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.298186 4747 reconciler.go:26] "Reconciler: start to sync state" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.301330 4747 manager.go:324] Recovery completed Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.311412 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.314352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.314544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.314649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.317592 4747 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.317617 4747 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.317641 4747 state_mem.go:36] "Initialized new in-memory state store" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.336386 4747 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.338008 4747 policy_none.go:49] "None policy: Start" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.338050 4747 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.338118 4747 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.338166 4747 kubelet.go:2335] "Starting kubelet main sync loop" Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.338486 4747 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.339088 4747 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.339140 4747 state_mem.go:35] "Initializing new in-memory state store" Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.339111 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.339663 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.369765 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391043 4747 manager.go:334] "Starting Device Plugin manager" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391147 4747 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391168 4747 server.go:79] "Starting device plugin registration server" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391715 4747 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391738 4747 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.391972 4747 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.392111 4747 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.392120 4747 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.399915 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.438828 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.438995 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.440419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.440478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.440497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.440698 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.441073 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.441146 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442296 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442579 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442635 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.442994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.443090 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.444324 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.444576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.444619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.444639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.444701 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.447656 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.448833 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.448922 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.450541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.450587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.450606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.450923 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.451027 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.452406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.452581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.452712 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.453049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.453076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.453087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.479742 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="400ms"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.492822 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.494396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.494461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.494475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.494532 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.495045 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.500853 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.500914 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.500999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501069 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501104 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501135 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501235 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501372 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.501460 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602282 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602353 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602390 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602421 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602453 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602483 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602514 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602544 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602577 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602605 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602633 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602711 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602753 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.602793 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603084 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603441 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603104 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603172 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603181 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603261 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603297 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603292 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603319 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603326 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603340 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603337 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603354 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.603216 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.696136 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.698659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.698725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.698745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.698785 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.699549 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.733000 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890622a81eb48e6 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 08:56:28.264433894 +0000 UTC m=+0.808772367,LastTimestamp:2026-02-02 08:56:28.264433894 +0000 UTC m=+0.808772367,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.773823 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.797832 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.804898 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.810165 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: I0202 08:56:28.830925 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:28 crc kubenswrapper[4747]: E0202 08:56:28.881004 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="800ms"
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.881193 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3e9c468918b74e63c0de458a5d1c99d9b84c9b944468997263f8795b712266c4 WatchSource:0}: Error finding container 3e9c468918b74e63c0de458a5d1c99d9b84c9b944468997263f8795b712266c4: Status 404 returned error can't find the container with id 3e9c468918b74e63c0de458a5d1c99d9b84c9b944468997263f8795b712266c4
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.883886 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-2673c4cf63831047c43bee00583ed9cc6f5b0967db10d1a611c7c7d32a40b0f5 WatchSource:0}: Error finding container 2673c4cf63831047c43bee00583ed9cc6f5b0967db10d1a611c7c7d32a40b0f5: Status 404 returned error can't find the container with id 2673c4cf63831047c43bee00583ed9cc6f5b0967db10d1a611c7c7d32a40b0f5
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.890360 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-46b878bbe0763da99e6e1051b1527daef6dcf47c5846ab343479b5e992a036e7 WatchSource:0}: Error finding container 46b878bbe0763da99e6e1051b1527daef6dcf47c5846ab343479b5e992a036e7: Status 404 returned error can't find the container with id 46b878bbe0763da99e6e1051b1527daef6dcf47c5846ab343479b5e992a036e7
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.890881 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3ce2f7e42fff0c9cf531096d6ddd1ae9b007f46af7f3f56637401504ea1bab77 WatchSource:0}: Error finding container 3ce2f7e42fff0c9cf531096d6ddd1ae9b007f46af7f3f56637401504ea1bab77: Status 404 returned error can't find the container with id 3ce2f7e42fff0c9cf531096d6ddd1ae9b007f46af7f3f56637401504ea1bab77
Feb 02 08:56:28 crc kubenswrapper[4747]: W0202 08:56:28.892353 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-05be08ae70818f5a9b3ecac487e32edceca767db7a337affc147ec3bd6fafa95 WatchSource:0}: Error finding container 05be08ae70818f5a9b3ecac487e32edceca767db7a337affc147ec3bd6fafa95: Status 404 returned error can't find the container with id 05be08ae70818f5a9b3ecac487e32edceca767db7a337affc147ec3bd6fafa95
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.100731 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.102612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.102677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.102697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.102737 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.103461 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.267067 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.269853 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 12:33:35.175554619 +0000 UTC
Feb 02 08:56:29 crc kubenswrapper[4747]: W0202 08:56:29.322520 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.322626 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.343340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"46b878bbe0763da99e6e1051b1527daef6dcf47c5846ab343479b5e992a036e7"}
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.344318 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2673c4cf63831047c43bee00583ed9cc6f5b0967db10d1a611c7c7d32a40b0f5"}
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.345560 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3e9c468918b74e63c0de458a5d1c99d9b84c9b944468997263f8795b712266c4"}
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.351691 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3ce2f7e42fff0c9cf531096d6ddd1ae9b007f46af7f3f56637401504ea1bab77"}
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.353338 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"05be08ae70818f5a9b3ecac487e32edceca767db7a337affc147ec3bd6fafa95"}
Feb 02 08:56:29 crc kubenswrapper[4747]: W0202 08:56:29.529339 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.529494 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:29 crc kubenswrapper[4747]: W0202 08:56:29.640008 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.640073 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.682558 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="1.6s"
Feb 02 08:56:29 crc kubenswrapper[4747]: W0202 08:56:29.816644 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.816779 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.904443 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.906774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.906808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.906817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:29 crc kubenswrapper[4747]: I0202 08:56:29.906837 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:29 crc kubenswrapper[4747]: E0202 08:56:29.907194 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.266864 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.274066 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.274133 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 00:41:09.798621257 +0000 UTC
Feb 02 08:56:30 crc kubenswrapper[4747]: E0202 08:56:30.275816 4747 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.359019 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9" exitCode=0
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.359090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.359209 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.360412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.360464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.360481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.363679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.363741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.363773 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.367271 4747 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a" exitCode=0
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.367424 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.367415 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.368817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.368852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.368868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.369813 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13" exitCode=0
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.369915 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.369913 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.371873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.371932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.371999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.374813 4747 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc" exitCode=0
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.374880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc"}
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.374925 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.376095 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.376147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.376197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.376216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.379292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.379339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:30 crc kubenswrapper[4747]: I0202 08:56:30.379360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: W0202 08:56:31.182814 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:31 crc kubenswrapper[4747]: E0202 08:56:31.182910 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.266723 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.275054 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 23:54:00.804040879 +0000 UTC
Feb 02 08:56:31 crc kubenswrapper[4747]: E0202 08:56:31.283591 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="3.2s"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.381467 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.381519 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.381532 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.381641 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.383738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.383767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.383779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.386921 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.386959 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.386970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.386979 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.388281 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.388352 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.390319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.390341 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.390350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.392673 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7" exitCode=0
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.392804 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.392870 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.394516 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.394546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.394558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.400881 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234"}
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.401024 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.402401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.402433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.402449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.507850 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.509080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.509122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.509135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:31 crc kubenswrapper[4747]: I0202 08:56:31.509161 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:31 crc kubenswrapper[4747]: E0202 08:56:31.509580 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc"
Feb 02 08:56:31 crc kubenswrapper[4747]: W0202 08:56:31.511756 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:31 crc kubenswrapper[4747]: E0202 08:56:31.511829 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:31 crc kubenswrapper[4747]: W0202 08:56:31.926590 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused
Feb 02 08:56:31 crc kubenswrapper[4747]: E0202 08:56:31.926705 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.201596 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.276066 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 20:27:56.865573543 +0000 UTC
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.406171 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.408977 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c" exitCode=255
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.409079 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c"}
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.409243 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.410722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.410881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.410907 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.411554 4747 scope.go:117] "RemoveContainer" containerID="e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.414456 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9" exitCode=0
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.414527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9"}
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.414572 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.414614 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.414716 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.415446 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.416192 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.416485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.416525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.416550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417005 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.417565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:32 crc kubenswrapper[4747]: I0202 08:56:32.873716 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.131847 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.276293 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 13:47:31.795684891 +0000 UTC
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.422461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68"}
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.422507 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e"}
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.422525 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70"}
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.422535 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a"}
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.425450 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.427515 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6"}
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.427658 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.427678 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.427678 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.428826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.429576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.429613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.429628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:33 crc kubenswrapper[4747]: I0202 08:56:33.478542 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.276902 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 20:29:28.919482614 +0000 UTC
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.438592 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3"}
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.438659 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.438729 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.438727 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440222 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.440610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.536774 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.684423 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.710595 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.711917 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.711974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.711988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:34 crc kubenswrapper[4747]: I0202 08:56:34.712011 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.277663 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 12:35:27.257962576 +0000 UTC
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.440823 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.440984 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.441870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.441923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.441975 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.442477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.442510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.442520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.912400 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.912615 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.914306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.914372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:35 crc kubenswrapper[4747]: I0202 08:56:35.914397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:36 crc kubenswrapper[4747]: I0202 08:56:36.278491 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 16:23:27.431757917 +0000 UTC
Feb 02 08:56:36 crc kubenswrapper[4747]: I0202 08:56:36.443155 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:36 crc kubenswrapper[4747]: I0202 08:56:36.444354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:36 crc kubenswrapper[4747]: I0202 08:56:36.444413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:36 crc kubenswrapper[4747]: I0202 08:56:36.444437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.279560 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 15:27:32.027256204 +0000 UTC
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.616067 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.616333 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.617853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.617922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:37 crc kubenswrapper[4747]: I0202 08:56:37.617980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:38 crc kubenswrapper[4747]: I0202 08:56:38.279722 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 04:37:03.919725655 +0000 UTC
Feb 02 08:56:38 crc kubenswrapper[4747]: E0202 08:56:38.400061 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Feb 02 08:56:39 crc kubenswrapper[4747]: I0202 08:56:39.280024 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 09:10:52.636743358 +0000 UTC
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.280893 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 01:01:44.972111509 +0000 UTC
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.428997 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.429217 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.431451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.431500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.431513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.436587 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started"
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.454796 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.456620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.456660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.456673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.461570 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.931660 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.931884 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.933319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.933520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:40 crc kubenswrapper[4747]: I0202 08:56:40.933714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.049060 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.282092 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 04:44:39.236626612 +0000 UTC Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.457585 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.458708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.458737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:41 crc kubenswrapper[4747]: I0202 08:56:41.458747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.267185 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.282628 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 08:35:10.20114968 +0000 UTC Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.459763 4747 
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.460565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.460612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.460629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.613421 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.613554 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.619056 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Feb 02 08:56:42 crc kubenswrapper[4747]: I0202 08:56:42.619148 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Feb 02 08:56:43 crc kubenswrapper[4747]: I0202 08:56:43.141183 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]log ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]etcd ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-api-request-count-filter ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-startkubeinformers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-apiserver-admission-initializer ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/generic-apiserver-start-informers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/priority-and-fairness-config-consumer ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/priority-and-fairness-filter ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/storage-object-count-tracker-hook ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-apiextensions-informers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-apiextensions-controllers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/crd-informer-synced ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-system-namespaces-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-cluster-authentication-info-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-legacy-token-tracking-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-service-ip-repair-controllers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Feb 02 08:56:43 crc kubenswrapper[4747]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/priority-and-fairness-config-producer ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/bootstrap-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/start-kube-aggregator-informers ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-status-local-available-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-status-remote-available-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-registration-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-wait-for-first-sync ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-discovery-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/kube-apiserver-autoregistration ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]autoregister-completion ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-openapi-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: [+]poststarthook/apiservice-openapiv3-controller ok
Feb 02 08:56:43 crc kubenswrapper[4747]: livez check failed
Feb 02 08:56:43 crc kubenswrapper[4747]: I0202 08:56:43.141278 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 08:56:43 crc kubenswrapper[4747]: I0202 08:56:43.283475 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 06:55:13.224583861 +0000 UTC
Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.049489 4747 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.049840 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
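Three distinct startup-probe failure modes appear in the transcript above: an HTTP 403 while the apiserver still maps the probe to system:anonymous, an HTTP 500 whose start-of-body enumerates poststarthook results (the [-] entries are the checks still failing), and a plain client timeout or connection refused before the port is serving. All of them come from an ordinary HTTP GET whose status code decides the result. A rough sketch of such a probe in Go, in the spirit of the prober.go lines here; the endpoint, timeout, and TLS handling are illustrative assumptions, not the kubelet's actual wiring:

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeOnce performs one HTTP probe: transport errors and non-2xx/3xx
// statuses are failures, and only the start of the body is kept, mirroring
// the "start-of-body=" fragments logged above.
func probeOnce(url string) (bool, string, error) {
	client := &http.Client{
		Timeout: 5 * time.Second,
		Transport: &http.Transport{
			// Self-signed control-plane certs; a real prober injects CAs.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return false, "", err // e.g. "connect: connection refused"
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
	ok := resp.StatusCode >= 200 && resp.StatusCode < 400
	return ok, string(body), nil
}

func main() {
	ok, body, err := probeOnce("https://192.168.126.11:6443/livez")
	fmt.Println("success:", ok, "err:", err)
	fmt.Println("start-of-body:", body)
}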
podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.284497 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 23:17:55.087615868 +0000 UTC Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.411388 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.411736 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.685163 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 08:56:44 crc kubenswrapper[4747]: I0202 08:56:44.685296 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 08:56:45 crc kubenswrapper[4747]: I0202 08:56:45.284853 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 16:17:01.791328115 +0000 UTC Feb 02 08:56:46 crc kubenswrapper[4747]: I0202 08:56:46.285600 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 00:49:28.706564507 +0000 UTC Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.285751 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 14:40:14.507218243 +0000 UTC Feb 02 08:56:47 crc kubenswrapper[4747]: E0202 08:56:47.604006 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.609104 4747 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.609742 4747 trace.go:236] Trace[1654649049]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 08:56:37.392) (total time: 10217ms): Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[1654649049]: ---"Objects listed" error: 10217ms 
(08:56:47.609) Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[1654649049]: [10.217102186s] [10.217102186s] END Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.609911 4747 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.610013 4747 trace.go:236] Trace[1043906968]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 08:56:37.383) (total time: 10226ms): Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[1043906968]: ---"Objects listed" error: 10226ms (08:56:47.609) Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[1043906968]: [10.22619568s] [10.22619568s] END Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.610525 4747 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 08:56:47 crc kubenswrapper[4747]: E0202 08:56:47.610524 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.610083 4747 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.611122 4747 trace.go:236] Trace[123600693]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 08:56:32.909) (total time: 14701ms): Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[123600693]: ---"Objects listed" error: 14700ms (08:56:47.610) Feb 02 08:56:47 crc kubenswrapper[4747]: Trace[123600693]: [14.70122371s] [14.70122371s] END Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.611151 4747 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 08:56:47 crc kubenswrapper[4747]: I0202 08:56:47.630625 4747 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.139804 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.140799 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.140877 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.144767 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.257335 4747 apiserver.go:52] "Watching apiserver" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.261412 4747 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.261869 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
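The reflector.go:368 and trace.go:236 entries above are client-go shared informers completing their initial LIST against the just-recovered apiserver: the traces time the "Objects listed" step at 10-14 seconds, and "Caches populated" marks the moment HasSynced flips to true. A compact sketch of the same machinery through the public client-go API; the kubeconfig path is a placeholder, since the kubelet builds its clients internally rather than like this:

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// One reflector per resource type, as in the factory.go:160 lines above.
	factory := informers.NewSharedInformerFactory(cs, 10*time.Minute)
	nodes := factory.Core().V1().Nodes().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// Blocks until the initial LIST lands in the store; this is the point
	// the kubelet logs as "Caches populated for *v1.Node".
	if !cache.WaitForCacheSync(stop, nodes.HasSynced) {
		panic("cache never synced")
	}
	fmt.Println("node cache synced")
}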
pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.262470 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.262914 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.262997 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.262761 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.263524 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.263183 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.263601 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.263209 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.263125 4747 util.go:30] "No sandbox for pod can be found. 
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.265283 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.266553 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.267755 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.268086 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.268134 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.268663 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.269107 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.269193 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.269231 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.271881 4747 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.285846 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 20:02:41.574697902 +0000 UTC
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314307 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314374 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314426 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314479 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314696 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314755 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314805 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314855 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314904 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314941 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.315026 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:56:48.814995723 +0000 UTC m=+21.359334376 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.314990 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315133 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315154 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315191 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315241 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315264 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315283 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315301 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
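The nestedpendingoperations.go:348 error above is the volume manager's retry gate in action: the TearDown failed because the kubevirt.io.hostpath-provisioner CSI driver had not re-registered yet, so the operation was parked with a deadline 500ms out ("No retries permitted until ...") and the delay grows on repeated failures. A small Go sketch of that pattern; the initial 500ms matches the logged durationBeforeRetry, while the doubling and the two-minute cap are assumptions:

package main

import (
	"fmt"
	"time"
)

// expBackoff refuses attempts before a deadline set by the last failure,
// doubling the wait each time up to a cap.
type expBackoff struct {
	duration time.Duration
	deadline time.Time
}

func (b *expBackoff) failed(now time.Time) {
	switch {
	case b.duration == 0:
		b.duration = 500 * time.Millisecond // first retry delay, as logged
	case b.duration < 2*time.Minute:
		b.duration *= 2
	}
	b.deadline = now.Add(b.duration)
}

func (b *expBackoff) allowed(now time.Time) error {
	if now.Before(b.deadline) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			b.deadline.Format(time.RFC3339Nano), b.duration)
	}
	return nil
}

func main() {
	var b expBackoff
	now := time.Now()
	b.failed(now) // TearDown failed: driver not yet registered
	fmt.Println(b.allowed(now.Add(100 * time.Millisecond))) // still inside the window
	fmt.Println(b.allowed(now.Add(time.Second)))            // past the deadline: <nil>
}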
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315322 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315340 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315359 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315378 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315397 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315420 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315441 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315460 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315481 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315506 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315523 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315587 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315609 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315634 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315681 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315699 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315748 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315771 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315792 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315812 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315833 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315886 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315903 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315922 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315975 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316018 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316083 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316103 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316149 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316193 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316212 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316236 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316258 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316282 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 08:56:48 crc 
kubenswrapper[4747]: I0202 08:56:48.316302 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316323 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316344 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316365 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316385 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316432 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316454 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316472 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316489 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316509 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316529 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316553 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316575 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316593 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316647 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316665 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316740 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316761 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316784 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316807 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316825 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316845 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316864 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316883 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316903 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316925 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316956 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" 
(UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316994 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317017 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317037 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317144 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317166 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317187 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317207 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317229 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317298 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317321 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod 
\"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317342 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317364 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317388 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317410 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317434 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317462 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317482 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317501 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317544 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317590 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317611 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317630 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317649 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317668 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317688 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317729 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 
08:56:48.317751 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317771 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317797 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317817 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317839 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317862 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318189 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318219 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318358 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 08:56:48 crc kubenswrapper[4747]: 
I0202 08:56:48.318381 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318401 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318424 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318443 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318463 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318480 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318498 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318729 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318759 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318778 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 
08:56:48.318800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318827 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318853 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318875 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315323 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315345 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315523 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315551 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.315900 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316126 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316229 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316209 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316241 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316472 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316528 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316651 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316675 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316729 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316750 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.316808 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.317903 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318338 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318361 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.318504 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319047 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319268 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319354 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319343 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319439 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319464 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319492 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319516 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319539 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319562 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319586 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319610 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319629 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod 
\"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319647 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319667 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319691 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319716 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319738 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319759 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319780 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319801 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319839 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319861 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319883 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319904 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319927 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319954 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319998 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320020 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320043 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320066 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320089 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320109 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320132 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320156 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320178 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320200 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320248 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320270 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320289 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320309 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320331 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320351 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320370 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320419 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320439 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320460 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320482 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320502 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320523 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320545 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320597 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321104 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321141 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321198 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321226 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321304 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321355 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321412 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321434 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321453 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321549 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321564 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321578 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321591 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321604 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321618 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321631 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321645 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321659 4747 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321673 4747 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321685 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321698 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321710 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321722 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321734 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321746 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321758 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321774 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321788 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321800 4747 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321813 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321829 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321841 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321854 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321868 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319396 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.319466 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320460 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320545 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.320558 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321272 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321351 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.334286 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321342 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321508 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.321597 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.322792 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.323263 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.323398 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.323475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.323866 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324249 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324302 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324320 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324595 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324637 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.324997 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.325261 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.325653 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.325705 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.325837 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.325886 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326001 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326104 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326486 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326579 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326728 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.326730 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.327275 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.327718 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.328002 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.328895 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329155 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329451 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329474 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329842 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.329862 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330257 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330279 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330296 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330341 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330700 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.330925 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.331115 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.331442 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.331517 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.331812 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.332629 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.332631 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333005 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333117 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333278 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333565 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333707 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333788 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333794 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333900 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.333919 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.334178 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.334429 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.335183 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.335261 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.335663 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.335696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336080 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336287 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336409 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336547 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336575 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336601 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.336785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337094 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337156 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337271 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337405 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337509 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337507 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337975 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.337548 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338083 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338038 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338181 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338249 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338294 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338272 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338411 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338556 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338639 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338817 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.338881 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.339468 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.339802 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340011 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340016 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.340091 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340669 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340734 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340786 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341033 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341068 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341092 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.340982 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341132 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341155 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.341430 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.342065 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.342122 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.342376 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.342577 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:48.842539922 +0000 UTC m=+21.386878545 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.342463 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.342602 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.343541 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.344720 4747 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.344041 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.344376 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.344542 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.345172 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.345294 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:48.845261392 +0000 UTC m=+21.389599825 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.345483 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.345603 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.345919 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.347459 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.347990 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.348267 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.348316 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.349253 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.349356 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.349463 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.349925 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.351399 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.351832 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.353860 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.354335 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.355113 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.355793 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.355951 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.359938 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.360591 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.362150 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.362290 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.362648 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.362838 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.363154 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:48.863112691 +0000 UTC m=+21.407451274 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.363203 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.362753 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.363458 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.363620 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.363524 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.363664 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.363694 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.363521 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.363761 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-02-02 08:56:48.863740698 +0000 UTC m=+21.408079171 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.364475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.364875 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.365702 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.368198 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.368320 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.369080 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.369144 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.369996 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.372486 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.372980 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.373089 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.374097 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.374200 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.374475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.374606 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.374712 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.375031 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.375454 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.375703 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.375804 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.376077 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.376136 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.376363 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.376768 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.376785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.377639 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.377693 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.377802 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.378158 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.378233 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.378533 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.378664 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.378848 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.379470 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.380464 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.380999 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.382539 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.383751 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.383811 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.383837 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.384724 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.386673 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.387728 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.390855 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.390968 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.392112 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.394116 4747 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.395307 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.400396 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.403015 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.407191 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.411726 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.411994 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.412147 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.413694 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.415480 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.417367 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.418579 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.419667 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.418420 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.421465 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423463 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423580 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423606 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423574 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423784 4747 reconciler_common.go:293] "Volume detached for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423806 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423822 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423835 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423900 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423917 4747 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423929 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423943 4747 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423971 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.423997 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424011 4747 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424023 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424036 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424050 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424062 4747 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424073 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424083 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424095 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424107 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424123 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424135 4747 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424148 4747 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424161 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424173 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424211 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424322 4747 
reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424395 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424421 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424433 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424448 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424464 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424480 4747 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424494 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424507 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424520 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424532 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424542 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424553 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on 
node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424565 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424577 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424589 4747 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424600 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424610 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424623 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424637 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424647 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424659 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424669 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424683 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424695 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424707 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on 
node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424720 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424733 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424747 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424759 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424772 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424785 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424797 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424810 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424822 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424833 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424845 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424858 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.424870 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 
08:56:48.424954 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425000 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425023 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425037 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425049 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425064 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425076 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425088 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425099 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425106 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425110 4747 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425245 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425260 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425275 4747 reconciler_common.go:293] "Volume detached for 
volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425287 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425302 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425316 4747 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425333 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425345 4747 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425360 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425374 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425386 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425398 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425412 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425431 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425445 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 
08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425458 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425472 4747 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425484 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425497 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425509 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425522 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425536 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425550 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425562 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425573 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425585 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425597 4747 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425609 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" 
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425621 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425632 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425644 4747 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425655 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425667 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425680 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425694 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425707 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425718 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425730 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425743 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425754 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425768 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425784 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425799 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425813 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425826 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425840 4747 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425852 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425865 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425877 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425888 4747 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425900 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425914 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425926 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425946 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425982 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.425995 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426008 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426020 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426032 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426045 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426062 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426075 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426089 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426130 4747 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426143 4747 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426155 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426170 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426182 4747 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426194 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426207 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426220 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426231 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426238 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426242 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426279 4747 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426295 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426307 4747 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426319 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426331 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426343 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426355 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426366 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426380 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426392 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426405 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426418 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426431 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426443 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426455 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426466 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426479 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426492 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426504 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426516 4747 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426529 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.426815 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.428627 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.429717 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.430136 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.430800 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.431458 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.432086 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.432303 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"message\\\":\\\"W0202 08:56:31.511525 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 08:56:31.511925 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770022591 cert, and key in /tmp/serving-cert-1627992742/serving-signer.crt, /tmp/serving-cert-1627992742/serving-signer.key\\\\nI0202 08:56:31.721852 1 observer_polling.go:159] Starting file observer\\\\nW0202 08:56:31.725961 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 08:56:31.726139 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:31.728052 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1627992742/tls.crt::/tmp/serving-cert-1627992742/tls.key\\\\\\\"\\\\nF0202 08:56:32.124906 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\
" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.433452 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.434241 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.435423 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.445384 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.456488 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.470466 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.476633 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.477542 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.479922 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" exitCode=255 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.479999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6"} Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.480061 4747 scope.go:117] "RemoveContainer" containerID="e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.482970 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.489410 4747 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.489740 4747 scope.go:117] "RemoveContainer" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.490261 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.495198 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.507858 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"message\\\":\\\"W0202 08:56:31.511525 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
08:56:31.511925 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770022591 cert, and key in /tmp/serving-cert-1627992742/serving-signer.crt, /tmp/serving-cert-1627992742/serving-signer.key\\\\nI0202 08:56:31.721852 1 observer_polling.go:159] Starting file observer\\\\nW0202 08:56:31.725961 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 08:56:31.726139 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:31.728052 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1627992742/tls.crt::/tmp/serving-cert-1627992742/tls.key\\\\\\\"\\\\nF0202 08:56:32.124906 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\
" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.521545 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.527448 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.543896 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.566569 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.580742 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.589719 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.592686 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: W0202 08:56:48.602388 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-264f0f7ff651b25846ef29712569323132a4a0507832aa2e770c773ec21068e0 WatchSource:0}: Error finding container 264f0f7ff651b25846ef29712569323132a4a0507832aa2e770c773ec21068e0: Status 404 returned error can't find the container with id 264f0f7ff651b25846ef29712569323132a4a0507832aa2e770c773ec21068e0 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.603107 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.603662 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.611910 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 08:56:48 crc kubenswrapper[4747]: W0202 08:56:48.617138 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-b4b786c653b74c6c6cad8183e8088451a459aeb653d21f69a244c3710f653867 WatchSource:0}: Error finding container b4b786c653b74c6c6cad8183e8088451a459aeb653d21f69a244c3710f653867: Status 404 returned error can't find the container with id b4b786c653b74c6c6cad8183e8088451a459aeb653d21f69a244c3710f653867 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.620455 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e14616700a27e6bc34e76848090ca35339cb2a11eef304a3b4801d5ca6c3cd8c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"message\\\":\\\"W0202 08:56:31.511525 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 08:56:31.511925 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770022591 cert, and key in /tmp/serving-cert-1627992742/serving-signer.crt, /tmp/serving-cert-1627992742/serving-signer.key\\\\nI0202 08:56:31.721852 1 observer_polling.go:159] Starting file observer\\\\nW0202 08:56:31.725961 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 08:56:31.726139 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:31.728052 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1627992742/tls.crt::/tmp/serving-cert-1627992742/tls.key\\\\\\\"\\\\nF0202 08:56:32.124906 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: W0202 08:56:48.631141 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-e4445f91f76e81e65a664f0422b2a16f3435617974907dbcbeff17b84efda050 WatchSource:0}: Error finding container e4445f91f76e81e65a664f0422b2a16f3435617974907dbcbeff17b84efda050: Status 404 returned error can't find the container with id e4445f91f76e81e65a664f0422b2a16f3435617974907dbcbeff17b84efda050 Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.640902 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.652680 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.664434 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.829607 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.829821 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:56:49.82979694 +0000 UTC m=+22.374135373 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.930387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.930427 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.930448 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:48 crc kubenswrapper[4747]: I0202 08:56:48.930468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930579 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930604 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930619 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930620 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930697 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" 
failed. No retries permitted until 2026-02-02 08:56:49.930684446 +0000 UTC m=+22.475022879 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930585 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930720 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:49.930714637 +0000 UTC m=+22.475053070 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930740 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:49.930732878 +0000 UTC m=+22.475071311 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930745 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930801 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930857 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:48 crc kubenswrapper[4747]: E0202 08:56:48.930945 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:49.930917772 +0000 UTC m=+22.475256205 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.286929 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 00:50:52.114285577 +0000 UTC Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.339420 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.339561 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.483508 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.489403 4747 scope.go:117] "RemoveContainer" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.489692 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.490138 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.490197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e4445f91f76e81e65a664f0422b2a16f3435617974907dbcbeff17b84efda050"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.491432 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b4b786c653b74c6c6cad8183e8088451a459aeb653d21f69a244c3710f653867"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.493032 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.493071 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.493086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"264f0f7ff651b25846ef29712569323132a4a0507832aa2e770c773ec21068e0"} Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.500000 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.510197 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.519954 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.531068 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.547373 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.560547 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.572022 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.584072 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.594589 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.604357 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.616341 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.639782 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.656195 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.669299 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.840733 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.841017 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:56:51.8409779 +0000 UTC m=+24.385316393 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.942423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.942504 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.942568 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:49 crc kubenswrapper[4747]: I0202 08:56:49.942626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.942886 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.942920 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.942979 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.942994 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943020 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943042 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:51.943022206 +0000 UTC m=+24.487360639 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943057 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943101 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943001 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943104 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:51.943078968 +0000 UTC m=+24.487417441 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943331 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:51.943271212 +0000 UTC m=+24.487609645 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:49 crc kubenswrapper[4747]: E0202 08:56:49.943371 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:51.943360795 +0000 UTC m=+24.487699368 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.287733 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 07:57:32.008312042 +0000 UTC Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.338501 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.338573 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:50 crc kubenswrapper[4747]: E0202 08:56:50.338709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:50 crc kubenswrapper[4747]: E0202 08:56:50.338874 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.344965 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.345519 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.346327 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.347036 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.347605 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.348357 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.349216 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.349816 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.350438 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.350922 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.351630 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.963377 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.980746 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.982233 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.984234 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 02 08:56:50 crc kubenswrapper[4747]: I0202 08:56:50.999112 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.011703 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.024402 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.037785 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.050004 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.055875 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.058896 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.065741 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.074029 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.075871 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.094682 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-m
etrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.106531 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.119516 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.132289 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.149602 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.162479 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.174843 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.288856 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 06:24:50.9952859 +0000 UTC Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.338905 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.339054 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.499255 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403"} Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.507076 4747 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.519657 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.538617 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.552691 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.566189 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.589344 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.604115 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.617687 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.638702 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.665736 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:51Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.856762 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.857005 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:56:55.856967605 +0000 UTC m=+28.401306088 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.957495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.957563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.957604 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:51 crc kubenswrapper[4747]: I0202 08:56:51.957643 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957766 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:51 crc 
kubenswrapper[4747]: E0202 08:56:51.957770 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957820 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957838 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957846 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957796 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957923 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957925 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:55.957902983 +0000 UTC m=+28.502241446 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.958112 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:55.958069427 +0000 UTC m=+28.502408090 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.958143 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2026-02-02 08:56:55.958132628 +0000 UTC m=+28.502471311 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.957910 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:51 crc kubenswrapper[4747]: E0202 08:56:51.958244 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:56:55.958234181 +0000 UTC m=+28.502572834 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.289828 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 07:35:07.024055519 +0000 UTC Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.338559 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.338598 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:52 crc kubenswrapper[4747]: E0202 08:56:52.338681 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:52 crc kubenswrapper[4747]: E0202 08:56:52.338832 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.375417 4747 csr.go:261] certificate signing request csr-fdlqz is approved, waiting to be issued Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.388300 4747 csr.go:257] certificate signing request csr-fdlqz is issued Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.485230 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-gjwhc"] Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.485557 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.487129 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.487432 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.487524 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.510668 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a564
6fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.521897 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.533708 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.546575 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.556871 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.564190 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: 
\"kubernetes.io/host-path/a1c5e693-ed04-4480-8159-91dc592b7edb-hosts-file\") pod \"node-resolver-gjwhc\" (UID: \"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.564335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vpz7\" (UniqueName: \"kubernetes.io/projected/a1c5e693-ed04-4480-8159-91dc592b7edb-kube-api-access-4vpz7\") pod \"node-resolver-gjwhc\" (UID: \"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.567823 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.581638 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.597115 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.609566 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.624991 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.665253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a1c5e693-ed04-4480-8159-91dc592b7edb-hosts-file\") pod \"node-resolver-gjwhc\" (UID: \"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.665351 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vpz7\" (UniqueName: \"kubernetes.io/projected/a1c5e693-ed04-4480-8159-91dc592b7edb-kube-api-access-4vpz7\") pod \"node-resolver-gjwhc\" (UID: \"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.665386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a1c5e693-ed04-4480-8159-91dc592b7edb-hosts-file\") pod \"node-resolver-gjwhc\" (UID: \"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.682823 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vpz7\" (UniqueName: \"kubernetes.io/projected/a1c5e693-ed04-4480-8159-91dc592b7edb-kube-api-access-4vpz7\") pod \"node-resolver-gjwhc\" (UID: 
\"a1c5e693-ed04-4480-8159-91dc592b7edb\") " pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.796746 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-gjwhc" Feb 02 08:56:52 crc kubenswrapper[4747]: W0202 08:56:52.809985 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1c5e693_ed04_4480_8159_91dc592b7edb.slice/crio-a7458c6229f547ff8cb008e14436d08da0ca202ad927339466b14673093f5f43 WatchSource:0}: Error finding container a7458c6229f547ff8cb008e14436d08da0ca202ad927339466b14673093f5f43: Status 404 returned error can't find the container with id a7458c6229f547ff8cb008e14436d08da0ca202ad927339466b14673093f5f43 Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.881416 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-g8f8b"] Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.881818 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.885898 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.886076 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.883969 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.887104 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.887773 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.925602 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.938431 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.950827 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.964636 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.966521 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-rootfs\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.966580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b92ss\" (UniqueName: \"kubernetes.io/projected/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-kube-api-access-b92ss\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.966665 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.966692 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-proxy-tls\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.977265 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:52 crc kubenswrapper[4747]: I0202 08:56:52.992756 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:52Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.004690 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.018821 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.038221 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.051563 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.064448 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.067746 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.067784 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-proxy-tls\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.067808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-rootfs\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.067830 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b92ss\" (UniqueName: 
\"kubernetes.io/projected/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-kube-api-access-b92ss\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.067995 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-rootfs\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.068514 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-mcd-auth-proxy-config\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.070847 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-proxy-tls\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.085744 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b92ss\" (UniqueName: \"kubernetes.io/projected/1fadfd97-1567-40c1-a5e7-98ed7e3d67d6-kube-api-access-b92ss\") pod \"machine-config-daemon-g8f8b\" (UID: \"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\") " pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.193792 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 08:56:53 crc kubenswrapper[4747]: W0202 08:56:53.203760 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fadfd97_1567_40c1_a5e7_98ed7e3d67d6.slice/crio-52cd04503907dfeb25397512853d3e21329cbdc80d4c4d089286211caffa0ef3 WatchSource:0}: Error finding container 52cd04503907dfeb25397512853d3e21329cbdc80d4c4d089286211caffa0ef3: Status 404 returned error can't find the container with id 52cd04503907dfeb25397512853d3e21329cbdc80d4c4d089286211caffa0ef3 Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.269399 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-pvnm7"] Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.269635 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-vjrcp"] Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.269760 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.271333 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7782"] Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.271881 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.272420 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.272426 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.272613 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.273059 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.274013 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.274491 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.274505 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.277028 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.277861 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.277867 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.278167 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.278557 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.279135 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.279261 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.280066 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.290438 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 07:11:51.562001687 +0000 UTC Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.298331 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.312317 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.328554 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.338530 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:53 crc kubenswrapper[4747]: E0202 08:56:53.338671 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.345858 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\
\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.366670 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370163 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-hostroot\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370207 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370229 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx6b6\" (UniqueName: \"kubernetes.io/projected/313d57a3-8403-49fb-b300-c82ec66f4339-kube-api-access-bx6b6\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370300 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370410 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-os-release\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370487 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" 
(UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-netns\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370525 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-bin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370551 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370568 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-daemon-config\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370608 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370628 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370657 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg9c5\" (UniqueName: \"kubernetes.io/projected/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-kube-api-access-kg9c5\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370685 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370705 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370736 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370749 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-kubelet\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370766 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cnibin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-socket-dir-parent\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-k8s-cni-cncf-io\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370856 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-multus\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370873 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370893 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc4tt\" (UniqueName: \"kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 
crc kubenswrapper[4747]: I0202 08:56:53.370916 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cni-binary-copy\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.370936 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-multus-certs\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371010 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371027 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371049 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371064 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371080 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371094 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371126 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-cnibin\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371156 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-os-release\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371189 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371209 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371228 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371245 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-etc-kubernetes\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-conf-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371277 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-system-cni-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371293 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-binary-copy\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-system-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.371335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.389438 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-02 08:51:52 +0000 UTC, rotation deadline is 2026-12-07 13:09:08.02093914 +0000 UTC Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.389535 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7396h12m14.631407958s for next certificate rotation Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.391374 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.424245 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.466690 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472034 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472080 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472103 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cni-binary-copy\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-multus-certs\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472154 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472162 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472192 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472189 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472207 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-multus-certs\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472266 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-cnibin\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472317 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-cnibin\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472329 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472358 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 
08:56:53.472380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472385 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472400 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472420 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-os-release\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472440 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472444 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472463 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-etc-kubernetes\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-system-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-conf-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472535 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-system-cni-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472551 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-binary-copy\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472602 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472618 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-hostroot\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472635 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx6b6\" (UniqueName: \"kubernetes.io/projected/313d57a3-8403-49fb-b300-c82ec66f4339-kube-api-access-bx6b6\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472666 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-os-release\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472696 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472703 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-conf-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472717 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-os-release\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472723 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cni-binary-copy\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472735 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-netns\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-system-cni-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472752 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-bin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472766 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-etc-kubernetes\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 
08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472835 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472852 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-system-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472901 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-daemon-config\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472928 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-hostroot\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.473501 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.473524 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.472882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-cni-dir\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.473597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-binary-copy\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.473748 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-netns\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.473789 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-os-release\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474068 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474100 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg9c5\" (UniqueName: \"kubernetes.io/projected/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-kube-api-access-kg9c5\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474155 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-daemon-config\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474190 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-bin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " 
pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474191 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/313d57a3-8403-49fb-b300-c82ec66f4339-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474231 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474263 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474269 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474373 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-kubelet\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474426 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474458 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-kubelet\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474478 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-socket-dir-parent\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474505 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-k8s-cni-cncf-io\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474524 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-multus-socket-dir-parent\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-multus\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474591 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-run-k8s-cni-cncf-io\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474641 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cnibin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474669 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-host-var-lib-cni-multus\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-cnibin\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474694 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474714 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.474727 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc4tt\" (UniqueName: 
\"kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.475158 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.476131 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/313d57a3-8403-49fb-b300-c82ec66f4339-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.477414 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.511633 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx6b6\" (UniqueName: \"kubernetes.io/projected/313d57a3-8403-49fb-b300-c82ec66f4339-kube-api-access-bx6b6\") pod \"multus-additional-cni-plugins-vjrcp\" (UID: \"313d57a3-8403-49fb-b300-c82ec66f4339\") " pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.519282 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.528485 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc4tt\" (UniqueName: \"kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt\") pod \"ovnkube-node-p7782\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.528696 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gjwhc" event={"ID":"a1c5e693-ed04-4480-8159-91dc592b7edb","Type":"ContainerStarted","Data":"bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4"} Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.528727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gjwhc" event={"ID":"a1c5e693-ed04-4480-8159-91dc592b7edb","Type":"ContainerStarted","Data":"a7458c6229f547ff8cb008e14436d08da0ca202ad927339466b14673093f5f43"} Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.532630 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg9c5\" (UniqueName: \"kubernetes.io/projected/b1945e48-8aba-4a55-8dce-18e4a87ce4c5-kube-api-access-kg9c5\") pod \"multus-pvnm7\" (UID: \"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\") " pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.536190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c"} Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.536235 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c"} Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.536248 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"52cd04503907dfeb25397512853d3e21329cbdc80d4c4d089286211caffa0ef3"} Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.551841 4747 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.569114 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.581213 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.582332 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pvnm7" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.588560 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" Feb 02 08:56:53 crc kubenswrapper[4747]: W0202 08:56:53.594437 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1945e48_8aba_4a55_8dce_18e4a87ce4c5.slice/crio-6d9a9c9aa1b606ff9dad7dfe2000ae8fd6d4eb7817aef4b93da7b718dc0ae244 WatchSource:0}: Error finding container 6d9a9c9aa1b606ff9dad7dfe2000ae8fd6d4eb7817aef4b93da7b718dc0ae244: Status 404 returned error can't find the container with id 6d9a9c9aa1b606ff9dad7dfe2000ae8fd6d4eb7817aef4b93da7b718dc0ae244 Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.597253 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.597582 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:56:53 crc kubenswrapper[4747]: W0202 08:56:53.601332 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod313d57a3_8403_49fb_b300_c82ec66f4339.slice/crio-ed9a3b4b745cf3d3b2c4b2158bd088685b4fbe998045f29b35053c9be23b190c WatchSource:0}: Error finding container ed9a3b4b745cf3d3b2c4b2158bd088685b4fbe998045f29b35053c9be23b190c: Status 404 returned error can't find the container with id ed9a3b4b745cf3d3b2c4b2158bd088685b4fbe998045f29b35053c9be23b190c Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.619549 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.629697 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.649689 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272
e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.664037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.675755 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.698878 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.730372 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\
":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.747161 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.758999 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.773235 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.787992 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.799165 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:53 crc kubenswrapper[4747]: I0202 08:56:53.811767 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:53Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.011884 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.013371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.013449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.013464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.013575 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.021645 4747 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.021962 4747 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.023076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.023135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.023153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.023178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.023192 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.043134 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.047584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.047655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.047671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.047695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.047713 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.060579 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.065019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.065061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.065074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.065092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.065107 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.080596 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.085400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.085445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.085456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.085476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.085488 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.099454 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.103751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.103789 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.103798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.103814 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.103824 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.117713 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.117835 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.119568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.119595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.119607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.119625 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.119634 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.221737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.221776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.221786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.221799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.221807 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.291462 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 01:58:37.037606518 +0000 UTC Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.324290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.324331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.324346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.324365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.324380 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.338785 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.338847 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.338951 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.339084 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.411354 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.412501 4747 scope.go:117] "RemoveContainer" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" Feb 02 08:56:54 crc kubenswrapper[4747]: E0202 08:56:54.412801 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.427131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.427166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.427177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.427192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.427203 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.529507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.529548 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.529557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.529569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.529580 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.539445 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" exitCode=0 Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.539514 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.539546 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"d7ef61205d215896237bde40b95364cf02d3cb48d352942dbbfb38d36315cbb9"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.541554 4747 generic.go:334] "Generic (PLEG): container finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881" exitCode=0 Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.541609 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.541645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerStarted","Data":"ed9a3b4b745cf3d3b2c4b2158bd088685b4fbe998045f29b35053c9be23b190c"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.543129 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerStarted","Data":"feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.543153 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerStarted","Data":"6d9a9c9aa1b606ff9dad7dfe2000ae8fd6d4eb7817aef4b93da7b718dc0ae244"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.553336 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
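Every "Unable to update node status" and "Failed to update status for pod" record in this section dies in the same place: the kubelet's status patch is intercepted by the network-node-identity admission webhook at https://127.0.0.1:9743, whose serving certificate expired at 2025-08-24T17:21:41Z while the node clock reads 2026-02-02. The error text is Go's crypto/x509 validity-window check surfacing through the TLS handshake. Below is a minimal, self-contained sketch of that comparison, useful for reading the records that follow; it is an illustration, not the kubelet's or the webhook's actual code, and the certificate path is hypothetical (the webhook pod mounts its cert under /etc/webhook-cert/ per the volumeMounts logged further down).

```go
// Sketch: the x509 validity-window check behind
// "certificate has expired or is not yet valid: current time ... is after ...".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point at any PEM-encoded certificate to inspect it.
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	// crypto/x509 rejects a chain whose leaf is outside [NotBefore, NotAfter];
	// the kubelet is just relaying that error from its HTTPS POST to the webhook.
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is inside its validity window")
	}
}
```

Until that certificate is rotated (or the clock corrected), every node- and pod-status patch below fails with the identical x509 error.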
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.564740 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.578706 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.594418 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.609239 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.620791 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.633010 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.633044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.633056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.633072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.633087 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.640995 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.654159 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.662884 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.674924 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.687720 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.704329 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z 
is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.714965 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.727127 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.736079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.736113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.736124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.736140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.736165 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.740040 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.754719 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.764902 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.778626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.798967 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.810315 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.835750 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.839002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.839035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.839046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.839062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.839073 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.847657 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.856436 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.868663 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.880742 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/e
tc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.899783 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z 
is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.914120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.925854 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:54Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.941486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.941514 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.941523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.941536 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:54 crc kubenswrapper[4747]: I0202 08:56:54.941544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:54Z","lastTransitionTime":"2026-02-02T08:56:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.043271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.043299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.043307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.043319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.043328 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.146201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.146563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.146574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.146590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.146602 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.249117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.249326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.249412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.249505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.249597 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.292350 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 14:37:38.511916022 +0000 UTC Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.338382 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.338510 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.352252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.352295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.352303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.352319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.352329 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.454699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.454735 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.454746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.454763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.454778 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550591 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550670 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550690 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.550700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.552883 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerStarted","Data":"ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.556794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.556836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.556847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.556866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.556877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.566413 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.579049 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.589783 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.610400 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.621894 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.631712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.642830 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.653658 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\
"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.659323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.659366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.659378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.659468 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.659501 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.674159 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85
c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.684291 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.694769 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.704912 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.716485 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.727424 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.762124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.762179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.762191 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.762208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.762221 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.859817 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-v96w7"] Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.860148 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.861925 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.861925 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.862253 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.862350 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.864618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.864664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.864679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.864700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.864714 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.873790 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.886702 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.896684 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.896875 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:57:03.89685035 +0000 UTC m=+36.441188843 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.897032 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.907862 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.917988 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.926763 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.934784 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.954035 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.964322 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.966759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.966791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.966802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.966821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.966836 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:55Z","lastTransitionTime":"2026-02-02T08:56:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.974436 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.986446 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.996437 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:55Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997733 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997777 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997798 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997820 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf78p\" (UniqueName: \"kubernetes.io/projected/10256a9d-49d9-40ff-ba5d-dd4a6a372593-kube-api-access-cf78p\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10256a9d-49d9-40ff-ba5d-dd4a6a372593-serviceca\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.997848 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:55 crc kubenswrapper[4747]: I0202 08:56:55.997874 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10256a9d-49d9-40ff-ba5d-dd4a6a372593-host\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.997899 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:03.997885739 +0000 UTC m=+36.542224172 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.997911 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.997985 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:03.997971062 +0000 UTC m=+36.542309495 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998000 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998030 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998044 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998053 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998069 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998081 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998095 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:03.998079734 +0000 UTC m=+36.542418167 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:55 crc kubenswrapper[4747]: E0202 08:56:55.998116 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:03.998107395 +0000 UTC m=+36.542445948 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.007291 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.018589 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.044467 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.069593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.069631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.069643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.069660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.069671 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.099307 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf78p\" (UniqueName: \"kubernetes.io/projected/10256a9d-49d9-40ff-ba5d-dd4a6a372593-kube-api-access-cf78p\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.099350 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10256a9d-49d9-40ff-ba5d-dd4a6a372593-serviceca\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.099386 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10256a9d-49d9-40ff-ba5d-dd4a6a372593-host\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.099464 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10256a9d-49d9-40ff-ba5d-dd4a6a372593-host\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.100472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/10256a9d-49d9-40ff-ba5d-dd4a6a372593-serviceca\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.116688 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf78p\" (UniqueName: \"kubernetes.io/projected/10256a9d-49d9-40ff-ba5d-dd4a6a372593-kube-api-access-cf78p\") pod \"node-ca-v96w7\" (UID: \"10256a9d-49d9-40ff-ba5d-dd4a6a372593\") " pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.172726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.172758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.172768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.172780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.172789 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.178045 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-v96w7" Feb 02 08:56:56 crc kubenswrapper[4747]: W0202 08:56:56.190739 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10256a9d_49d9_40ff_ba5d_dd4a6a372593.slice/crio-d1d08bc07c94dcc136e9bc2a2571ccfe1333f84ed69203cfdb29325c20260b55 WatchSource:0}: Error finding container d1d08bc07c94dcc136e9bc2a2571ccfe1333f84ed69203cfdb29325c20260b55: Status 404 returned error can't find the container with id d1d08bc07c94dcc136e9bc2a2571ccfe1333f84ed69203cfdb29325c20260b55 Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.274890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.274938 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.274973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.274992 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.275007 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.293177 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 14:32:51.549678028 +0000 UTC Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.338374 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.338400 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:56 crc kubenswrapper[4747]: E0202 08:56:56.338530 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:56 crc kubenswrapper[4747]: E0202 08:56:56.338626 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.379147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.379182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.379192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.379208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.379221 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.481025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.481063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.481076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.481091 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.481103 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.558335 4747 generic.go:334] "Generic (PLEG): container finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068" exitCode=0 Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.558425 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.560523 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-v96w7" event={"ID":"10256a9d-49d9-40ff-ba5d-dd4a6a372593","Type":"ContainerStarted","Data":"fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.560660 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-v96w7" event={"ID":"10256a9d-49d9-40ff-ba5d-dd4a6a372593","Type":"ContainerStarted","Data":"d1d08bc07c94dcc136e9bc2a2571ccfe1333f84ed69203cfdb29325c20260b55"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.575330 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.583985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.584373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.584386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.584410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 
08:56:56.584428 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.595029 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.608200 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.621319 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.634532 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.647973 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.658123 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.679138 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.687313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.687352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.687363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.687379 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.687389 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.691344 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.702227 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.718003 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.730631 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.743629 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.754563 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.770810 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.786094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.788914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.788951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.788959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.788971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.788981 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.798911 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.809270 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.818760 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.830838 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.842772 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.852400 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.864626 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.879625 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.889228 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.890705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.890740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.890750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.890767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.890778 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.898869 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.911880 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.922796 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.942909 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.957114 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:56Z is after 2025-08-24T17:21:41Z"
Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.992772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.992815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.992826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.992843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:56 crc kubenswrapper[4747]: I0202 08:56:56.992856 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:56Z","lastTransitionTime":"2026-02-02T08:56:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.095008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.095095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.095126 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.095145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.095157 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.197819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.197847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.197857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.197871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.197882 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.293313 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 02:24:07.643199194 +0000 UTC
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.300728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.300774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.300785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.300801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.300812 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.338622 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:56:57 crc kubenswrapper[4747]: E0202 08:56:57.338793 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.403414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.403448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.403456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.403470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.403482 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.506574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.506626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.506667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.506687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.506699 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.566732 4747 generic.go:334] "Generic (PLEG): container finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4" exitCode=0 Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.566794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4"} Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.580748 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.609213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.609256 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.609270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.609289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.609305 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.626237 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.669129 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
26-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.681090 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.694312 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.705914 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.711394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.711445 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.711457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.711474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.711485 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.721655 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",
\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.740813 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z 
is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.751582 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.762970 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.773658 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.783789 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.793166 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.805087 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.815285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.815332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.815344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.815361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 
02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.815373 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.818577 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:57Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.917569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.917608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.917619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.917634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:57 crc kubenswrapper[4747]: I0202 08:56:57.917646 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:57Z","lastTransitionTime":"2026-02-02T08:56:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.019647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.019694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.019705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.019721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.019733 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.079394 4747 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.121421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.121633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.121698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.121763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.121818 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.224125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.224165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.224176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.224193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.224207 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.294397 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 00:32:20.272908153 +0000 UTC Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.326404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.326432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.326441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.326453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.326461 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.338807 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:56:58 crc kubenswrapper[4747]: E0202 08:56:58.339032 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.339555 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:56:58 crc kubenswrapper[4747]: E0202 08:56:58.339697 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.354867 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.377746 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.394309 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.427077 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.428284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.428322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.428338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.428358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.428373 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.440557 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.453342 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.469919 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.484390 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.496760 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.508416 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08
:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.521156 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-a
piserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.529921 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.530699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.530744 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.530752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.530765 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.530775 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.537724 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.550248 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02
T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.571360 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"im
age\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.574214 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.576783 4747 generic.go:334] "Generic (PLEG): container 
finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2" exitCode=0 Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.576867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.590495 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.602434 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.614655 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.632706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.632742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.632752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.632815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.632827 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.634146 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z 
is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.646230 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.659157 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.669430 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.680159 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.690264 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.701540 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.711318 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.728735 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.735246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.735273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.735282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.735294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.735302 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.738766 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.747648 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.760171 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:58Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.837742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.837776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.837789 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.837801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.837810 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.939798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.940123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.940139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.940157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:58 crc kubenswrapper[4747]: I0202 08:56:58.940170 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:58Z","lastTransitionTime":"2026-02-02T08:56:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.042153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.042193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.042204 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.042220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.042232 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.145177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.145256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.145280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.145310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.145335 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.248036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.248097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.248108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.248128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.248139 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.295142 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 11:11:47.193783775 +0000 UTC Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.339048 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:56:59 crc kubenswrapper[4747]: E0202 08:56:59.339186 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.350417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.350454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.350466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.350523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.350536 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.452873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.452955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.452974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.453002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.453020 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.556421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.556485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.556512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.556543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.556567 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.583147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerStarted","Data":"82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.600312 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.616541 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.634575 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.652689 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.658654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.658759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.658783 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.658808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.658826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.670077 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.682430 4747 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.694884 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.727034 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.744264 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.756298 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.761642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.761693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.761707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.761725 4747 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.761737 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.774426 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.791287 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.806500 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.821608 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.844799 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:56:59Z is after 2025-08-24T17:21:41Z" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.863718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.863808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.863829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.863850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.863868 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.967204 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.967272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.967296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.967323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:56:59 crc kubenswrapper[4747]: I0202 08:56:59.967341 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:56:59Z","lastTransitionTime":"2026-02-02T08:56:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.069792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.069845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.069860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.069881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.069897 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.173460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.173492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.173507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.173524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.173542 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.277235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.277285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.277297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.277313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.277327 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.296026 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 17:25:02.990500159 +0000 UTC
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.339539 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.339626 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 08:57:00 crc kubenswrapper[4747]: E0202 08:57:00.339771 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 08:57:00 crc kubenswrapper[4747]: E0202 08:57:00.340039 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.380108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.380147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.380157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.380173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.380185 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.482639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.482682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.482696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.482711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.482722 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.585123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.585176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.585193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.585215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.585234 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.590099 4747 generic.go:334] "Generic (PLEG): container finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d" exitCode=0 Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.590201 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d"} Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.601875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c"} Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.603115 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.603173 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.618365 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.639241 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.644609 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.645606 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.652469 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 
2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.663572 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.684586 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.690173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.690230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.690246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.690266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.690280 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.702491 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.715773 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.734120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.749929 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.767981 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.785533 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.792454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.792490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.792499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.792512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.792520 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.804329 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85
c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.816428 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.837960 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.849484 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.859496 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.868950 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.880475 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.894308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.894347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.894358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.894372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.894382 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.898023 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5
646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.908602 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.920179 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.931868 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.948979 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb6
89617509ed764a0a0262e74c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.965111 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.976279 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.988583 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.996396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.996424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.996433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.996445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:00 crc kubenswrapper[4747]: I0202 08:57:00.996453 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:00Z","lastTransitionTime":"2026-02-02T08:57:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.000434 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:00Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.010975 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.019283 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.029702 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.098508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.098558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.098572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.098589 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.098600 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.201393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.201455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.201472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.201492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.201504 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.297159 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 10:05:29.408945629 +0000 UTC
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.304317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.304359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.304367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.304413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.304424 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.339177 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:57:01 crc kubenswrapper[4747]: E0202 08:57:01.339390 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.410024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.410083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.410095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.410115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.410131 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.512962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.513001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.513010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.513023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.513034 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.609106 4747 generic.go:334] "Generic (PLEG): container finished" podID="313d57a3-8403-49fb-b300-c82ec66f4339" containerID="7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5" exitCode=0
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.609198 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerDied","Data":"7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5"}
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.609254 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.614809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.614844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.615443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.615465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.615477 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.623923 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.643429 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.671675 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.684622 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.698681 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.713294 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.717772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.717822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.717831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.717845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.717876 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.728278 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.746134 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\
\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is 
after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.759456 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.773118 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.784711 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.796352 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.807400 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.820861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.820899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.820910 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.820927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.820953 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.821124 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.834924 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:01Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.923639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.923679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.923688 4747 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.923702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:01 crc kubenswrapper[4747]: I0202 08:57:01.923713 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:01Z","lastTransitionTime":"2026-02-02T08:57:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.026608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.026649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.026659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.026671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.026680 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.129911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.130092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.130125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.130153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.130172 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.232697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.232751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.232802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.232847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.232887 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.298080 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 03:20:26.937407308 +0000 UTC Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.334998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.335041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.335054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.335069 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.335080 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.339383 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.339415 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:02 crc kubenswrapper[4747]: E0202 08:57:02.339563 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:02 crc kubenswrapper[4747]: E0202 08:57:02.339709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.438605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.438714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.438724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.438741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.438754 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.553243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.553289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.553301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.553319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.553334 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.614876 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.615885 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" event={"ID":"313d57a3-8403-49fb-b300-c82ec66f4339","Type":"ContainerStarted","Data":"19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.627892 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.641292 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.653534 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.655315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.655339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.655355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.655367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.655377 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.668821 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.684976 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.703000 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.712646 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.733154 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.753380 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.757071 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.757110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.757122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.757138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.757155 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.769547 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.785447 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-bin
ary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.796438 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kuberne
tes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.813132 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb6
89617509ed764a0a0262e74c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.823667 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.838320 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:02Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.858821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.858863 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.858874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.858890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.858902 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.961208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.961256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.961267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.961285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:02 crc kubenswrapper[4747]: I0202 08:57:02.961296 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:02Z","lastTransitionTime":"2026-02-02T08:57:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.063603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.063644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.063654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.063671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.063681 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.165239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.165262 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.165270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.165290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.165298 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.267257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.267620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.267638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.267658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.267671 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.298847 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 17:49:44.780983965 +0000 UTC Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.338401 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:03 crc kubenswrapper[4747]: E0202 08:57:03.338556 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.370024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.370059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.370068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.370082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.370093 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.475136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.475185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.475195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.475210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.475220 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.577299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.577355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.577368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.577389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.577404 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.620037 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/0.log" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.622242 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c" exitCode=1 Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.622287 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.623159 4747 scope.go:117] "RemoveContainer" containerID="f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.648879 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.659918 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.668677 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.679212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.679248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.679257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.679271 4747 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.679281 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.683818 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-bi
nary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\
\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for 
pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.695172 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.706883 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.720327 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.739192 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 
08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105
c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.753867 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.767973 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.782023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.782072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.782089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.782111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.782125 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.784813 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.797602 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.814912 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.825146 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.833413 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:03Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.884278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.884346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.884358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.884377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.884389 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.984034 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:57:03 crc kubenswrapper[4747]: E0202 08:57:03.984300 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:57:19.984267125 +0000 UTC m=+52.528605578 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.986714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.986763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.986777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.986799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:03 crc kubenswrapper[4747]: I0202 08:57:03.986816 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:03Z","lastTransitionTime":"2026-02-02T08:57:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.085437 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.085498 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.085531 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.085558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085645 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085676 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085687 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085709 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085722 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085736 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:20.085713655 +0000 UTC m=+52.630052188 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085685 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085756 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:20.085746486 +0000 UTC m=+52.630084919 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085762 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085770 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085771 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:20.085762926 +0000 UTC m=+52.630101479 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.085821 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:20.085806457 +0000 UTC m=+52.630144890 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.089006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.089039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.089051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.089068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.089079 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.191240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.191276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.191285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.191302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.191314 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.293653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.293709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.293728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.293755 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.293777 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.299980 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 15:13:43.081225925 +0000 UTC Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.339043 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.339100 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.339172 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.339318 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.395851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.395916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.395928 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.395966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.395977 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.436836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.436887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.436896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.436911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.436921 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.449318 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.452374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.452401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.452410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.452423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.452431 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.469715 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.473764 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.473799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.473812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.473827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.473838 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.491446 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.495437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.495512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.495545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.495572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.495589 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.512637 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.516710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.516757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.516767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.516780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.516789 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.533002 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: E0202 08:57:04.533134 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.534546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.534587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.534597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.534608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.534617 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.628697 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/0.log" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.633320 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.633654 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.638431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.638634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.638760 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.638877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.639042 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.665796 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.682468 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.700781 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.721274 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default 
network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"i
nitContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.732414 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.741644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.741696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.741712 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.741729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.741740 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.744426 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.757227 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.782217 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.794190 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.804218 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.814521 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.844257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.844289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.844299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.844314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.844324 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.857137 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.869423 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.879138 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.894643 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:04Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.946365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.946411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:04 crc 
kubenswrapper[4747]: I0202 08:57:04.946423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.946439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:04 crc kubenswrapper[4747]: I0202 08:57:04.946451 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:04Z","lastTransitionTime":"2026-02-02T08:57:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.049281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.049338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.049355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.049379 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.049395 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.153045 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.153115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.153140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.153164 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.153191 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.256234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.256285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.256298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.256315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.256328 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.300100 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 18:01:28.866547101 +0000 UTC
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.339095 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:57:05 crc kubenswrapper[4747]: E0202 08:57:05.339348 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.359837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.359901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.359919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.359986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.360004 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.463664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.463732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.463760 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.463789 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.463808 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.565780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.565851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.565867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.565881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.565895 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.638032 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/1.log"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.639083 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/0.log"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.643271 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79" exitCode=1
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.643366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79"}
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.643421 4747 scope.go:117] "RemoveContainer" containerID="f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.645909 4747 scope.go:117] "RemoveContainer" containerID="9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79"
Feb 02 08:57:05 crc kubenswrapper[4747]: E0202 08:57:05.646172 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a"
Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.660605 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.667839 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.667889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.667902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.667921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.667968 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.673926 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.689598 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.703188 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.714694 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.731200 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.749715 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.762844 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.771088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.771188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.771217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.771254 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.771279 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.775411 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.792533 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.805907 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.833823 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default 
network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.847104 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.860209 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.872744 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:05Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.873105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.873170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.873187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.873214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.873232 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.975916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.975985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.975999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.976019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:05 crc kubenswrapper[4747]: I0202 08:57:05.976033 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:05Z","lastTransitionTime":"2026-02-02T08:57:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.079453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.079526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.079550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.079578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.079602 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.182883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.182959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.182972 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.182997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.183024 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.262801 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj"] Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.263471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.267260 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.267549 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.286122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.286207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.286235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.286268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.286291 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.300257 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.300573 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 12:32:43.940936935 +0000 UTC Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.318159 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.334094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.338775 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:06 crc kubenswrapper[4747]: E0202 08:57:06.338885 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.338992 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:06 crc kubenswrapper[4747]: E0202 08:57:06.339165 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.353554 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"c
ni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388
e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" 
for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.371103 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kub
ernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.388409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.388452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.388462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.388478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.388492 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.401814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.412270 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.412312 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.412345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.412464 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gq97\" (UniqueName: \"kubernetes.io/projected/5e5ffc87-756b-441c-8001-8bcc1def73cd-kube-api-access-5gq97\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.417762 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.433452 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.446295 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.458158 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.473413 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.487423 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.491283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.491331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.491344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.491361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.491372 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.504369 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.512951 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.513005 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.513045 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.513083 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gq97\" (UniqueName: \"kubernetes.io/projected/5e5ffc87-756b-441c-8001-8bcc1def73cd-kube-api-access-5gq97\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.514449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.514503 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.521170 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5e5ffc87-756b-441c-8001-8bcc1def73cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.522840 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.536517 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gq97\" (UniqueName: \"kubernetes.io/projected/5e5ffc87-756b-441c-8001-8bcc1def73cd-kube-api-access-5gq97\") pod \"ovnkube-control-plane-749d76644c-hfvzj\" (UID: \"5e5ffc87-756b-441c-8001-8bcc1def73cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 
08:57:06.536555 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.550127 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:06Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.578565 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" Feb 02 08:57:06 crc kubenswrapper[4747]: W0202 08:57:06.593253 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e5ffc87_756b_441c_8001_8bcc1def73cd.slice/crio-0c1d1554ee9157651990267133594de6b1f2eb244021f58836d74d9053796202 WatchSource:0}: Error finding container 0c1d1554ee9157651990267133594de6b1f2eb244021f58836d74d9053796202: Status 404 returned error can't find the container with id 0c1d1554ee9157651990267133594de6b1f2eb244021f58836d74d9053796202 Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.593271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.593355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.593383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.593418 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.593441 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.649457 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" event={"ID":"5e5ffc87-756b-441c-8001-8bcc1def73cd","Type":"ContainerStarted","Data":"0c1d1554ee9157651990267133594de6b1f2eb244021f58836d74d9053796202"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.651792 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/1.log" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.696241 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.696280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.696289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.696303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.696315 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.799372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.799395 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.799405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.799421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.799432 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.901923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.902236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.902249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.902266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.902278 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:06Z","lastTransitionTime":"2026-02-02T08:57:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.993092 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-t5t4m"] Feb 02 08:57:06 crc kubenswrapper[4747]: I0202 08:57:06.993723 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:06 crc kubenswrapper[4747]: E0202 08:57:06.993810 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.004990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.005040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.005055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.005075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.005091 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.005759 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a
8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.020009 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.045246 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.057359 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.068894 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.079634 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.089654 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.098689 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.107368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.107662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.107752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.107850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.107957 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.116707 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.118856 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtnrz\" (UniqueName: \"kubernetes.io/projected/83246640-90cc-4bd6-b508-9e2ebdcda8c2-kube-api-access-wtnrz\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.119058 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.126767 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.136311 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.149279 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.165326 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.181955 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default 
network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.191925 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.203094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.210225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.210255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.210264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.210276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.210285 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.214152 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.220672 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtnrz\" (UniqueName: \"kubernetes.io/projected/83246640-90cc-4bd6-b508-9e2ebdcda8c2-kube-api-access-wtnrz\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.220745 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: E0202 08:57:07.220904 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:07 crc kubenswrapper[4747]: E0202 08:57:07.221000 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:07.720978211 +0000 UTC m=+40.265316654 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.236027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtnrz\" (UniqueName: \"kubernetes.io/projected/83246640-90cc-4bd6-b508-9e2ebdcda8c2-kube-api-access-wtnrz\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.301618 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 14:06:11.581080315 +0000 UTC Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.312259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.312301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.312310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.312323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.312333 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.338720 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:07 crc kubenswrapper[4747]: E0202 08:57:07.338847 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.415272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.415315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.415325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.415340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.415352 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.518261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.518316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.518334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.518359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.518381 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.620873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.620921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.620962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.620983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.620997 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.660386 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" event={"ID":"5e5ffc87-756b-441c-8001-8bcc1def73cd","Type":"ContainerStarted","Data":"6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.660484 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" event={"ID":"5e5ffc87-756b-441c-8001-8bcc1def73cd","Type":"ContainerStarted","Data":"75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.676226 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.694365 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.708343 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.723967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.724015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.724025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.724042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.724054 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.725356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:07 crc kubenswrapper[4747]: E0202 08:57:07.725658 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:07 crc kubenswrapper[4747]: E0202 08:57:07.725820 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:08.725781121 +0000 UTC m=+41.270119604 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.727499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network 
controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.738401 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.753163 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.767374 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.781759 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.796235 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.812071 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.826718 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.826773 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.826786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.826810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.826824 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.827154 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.840523 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.856845 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc
8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.870491 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.885536 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.905444 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.927863 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:07Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.929121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.929238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.929264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.929294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:07 crc kubenswrapper[4747]: I0202 08:57:07.929313 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:07Z","lastTransitionTime":"2026-02-02T08:57:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.031795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.031850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.031866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.031887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.031902 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.137253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.137311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.137328 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.137352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.137370 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.240255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.240279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.240287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.240299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.240307 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.302764 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 13:08:43.001631279 +0000 UTC Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.338481 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.338528 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.338596 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:08 crc kubenswrapper[4747]: E0202 08:57:08.339296 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:08 crc kubenswrapper[4747]: E0202 08:57:08.339156 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:08 crc kubenswrapper[4747]: E0202 08:57:08.338928 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.343488 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.343539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.343551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.343569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.343583 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.354977 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.374313 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.392613 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.411665 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.423785 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.445642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.445684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.445693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.445707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.445720 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.453094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.467543 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.477642 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.494316 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.517644 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.537888 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default 
network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549299 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
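Every status patch in this stretch fails the same way: the kubelet's PATCH reaches the apiserver, the apiserver consults the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and the TLS handshake is rejected because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-02-02. A minimal sketch of the validity-window check Go's verifier applies, assuming a hypothetical PEM path for the webhook's serving certificate:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point this at the webhook's serving certificate.
	raw, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	// The same window the TLS verifier enforces: NotBefore <= now <= NotAfter.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}
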
Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.549343 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.561073 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.575037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.589882 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.602106 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
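The network-diagnostics pods above carry lastState.terminated with exitCode 137 and reason ContainerStatusUnknown. By the usual shell and container-runtime convention (not something this log states), exit codes above 128 encode 128 plus the fatal signal number, so 137 corresponds to SIGKILL (9), consistent with containers that vanished rather than exiting on their own. A small decoder for that convention:

package main

import "fmt"

// signalFromExitCode maps the 128+N convention used by shells and
// container runtimes back to a signal number, if one applies.
func signalFromExitCode(code int) (int, bool) {
	if code > 128 && code < 160 {
		return code - 128, true
	}
	return 0, false
}

func main() {
	for _, code := range []int{0, 1, 137, 143, 255} {
		if sig, ok := signalFromExitCode(code); ok {
			fmt.Printf("exit code %d => killed by signal %d\n", code, sig)
		} else {
			fmt.Printf("exit code %d => plain exit status\n", code)
		}
	}
}

Here 137 decodes to SIGKILL (9) and 143 to SIGTERM (15).
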
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.618301 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
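Each failed update above is a strategic merge patch: the $setElementOrder/conditions directive pins the ordering of the conditions list while only the changed elements travel in conditions itself. A sketch assembling the same shape of payload, with values trimmed to a single changed condition and the uid taken from the network-check-source entry above:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Order-only directive plus the changed list elements, mirroring the
	// patches the kubelet's status manager sends in the entries above.
	patch := map[string]any{
		"metadata": map[string]any{"uid": "9d751cbb-f2e2-430d-9754-c882a5e924a5"},
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "PodReadyToStartContainers"},
				{"type": "Initialized"},
				{"type": "Ready"},
				{"type": "ContainersReady"},
				{"type": "PodScheduled"},
			},
			"conditions": []map[string]any{
				{"type": "Ready", "status": "False", "reason": "ContainersNotReady"},
			},
		},
	}
	out, err := json.MarshalIndent(patch, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
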
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:08Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.651706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.651745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.651757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.651774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.651826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.738482 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:08 crc kubenswrapper[4747]: E0202 08:57:08.738658 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:08 crc kubenswrapper[4747]: E0202 08:57:08.738772 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:10.738740255 +0000 UTC m=+43.283078718 (durationBeforeRetry 2s). 
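"No retries permitted until ... (durationBeforeRetry 2s)" is the kubelet's per-volume exponential backoff: each consecutive MountVolume.SetUp failure roughly doubles the wait before the next attempt, up to a cap. Only the 2s step is visible in this log; the 500ms start, 2x factor, and roughly two-minute cap below are assumptions in the style of the kubelet's settings, not values read from here:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed parameters in the style of the kubelet's nested pending
	// operations backoff; only the 2s step appears in the log above.
	initial := 500 * time.Millisecond
	factor := 2.0
	maxDelay := 2*time.Minute + 2*time.Second

	d := initial
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, d)
		next := time.Duration(float64(d) * factor)
		if next > maxDelay {
			next = maxDelay
		}
		d = next
	}
}
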
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.754376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.754426 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.754436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.754451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.754462 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.857584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.857637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.857650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.857671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.857686 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
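The NetworkReady=false / KubeletNotReady condition keeps repeating because the container runtime finds no network config under /etc/kubernetes/cni/net.d/; it clears once the CNI plugin (here OVN-Kubernetes) writes its config file there. A sketch of the directory probe involved, with the .conf/.conflist/.json extension set assumed from common CNI practice:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether any plausible CNI network config file
// exists in dir; the extension set is an assumption about common practice.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("CNI config present:", ok)
}
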
Has your network provider started?"} Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.960835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.960890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.960904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.960921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:08 crc kubenswrapper[4747]: I0202 08:57:08.960951 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:08Z","lastTransitionTime":"2026-02-02T08:57:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.064281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.064366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.064391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.064420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.064442 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.167196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.167252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.167269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.167292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.167310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
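Stretches like this are easier to audit after collapsing duplicates: the kubelet payload embeds klog headers (severity letter, MMDD date, time, PID, file:line). A reading aid that tallies identical messages from stdin; the regexp is fitted to the excerpt above rather than being a general klog grammar:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches e.g. `I0202 08:57:08.549210 4747 kubelet_node_status.go:724] "Recording..."`.
var klogLine = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+(\S+\.go:\d+)\] (.*)`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries here run very long
	for sc.Scan() {
		if m := klogLine.FindStringSubmatch(sc.Text()); m != nil {
			// Key on file:line plus message text to count exact repeats.
			counts[m[5]+" "+m[6]]++
		}
	}
	for msg, n := range counts {
		fmt.Printf("%6d  %s\n", n, msg)
	}
}
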
Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.269434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.269505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.269539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.269567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.269587 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.303713 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 20:34:44.434943138 +0000 UTC Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.339391 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:09 crc kubenswrapper[4747]: E0202 08:57:09.339788 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.340138 4747 scope.go:117] "RemoveContainer" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.372389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.372425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.372434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.372449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.372459 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
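The certificate_manager line above shows the kubelet-serving certificate expiring 2026-02-24 with a rotation deadline of 2026-01-04, a deadline already in the past on this node's clock, so rotation is due immediately. client-go's certificate manager derives that deadline from a jittered fraction of the certificate's lifetime; the 70-90% band and the assumed issuance date below are illustrative assumptions, not values from this log:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point late in the certificate's
// validity, in the style of client-go's certificate manager; the
// 0.7+0.2*rand band is an assumption, not read from this log.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(frac * float64(total)))
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.Add(-365 * 24 * time.Hour) // issuance time assumed
	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Println("rotate at:", deadline.UTC().Format(time.RFC3339))
	if time.Now().After(deadline) {
		fmt.Println("deadline passed: rotation due now")
	}
}
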
Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.474613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.474643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.474652 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.474673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.474693 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.577201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.577263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.577272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.577285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.577295 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
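The condition={...} payload repeated by setters.go above maps field-for-field onto the core/v1 NodeCondition shape. A self-contained stand-in (a local struct rather than the imported Kubernetes API type) that reproduces the same JSON:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Local stand-in for core/v1 NodeCondition, limited to the fields
// visible in the log lines above.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Date(2026, 2, 2, 8, 57, 9, 0, time.UTC).Format(time.RFC3339)
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	out, _ := json.Marshal(c)
	fmt.Println(string(out))
}
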
Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.667397 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.669833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.670274 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.680146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.680203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.680219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.680237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.680253 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.687785 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.703185 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.718144 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
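The Post "https://127.0.0.1:9743/pod?timeout=10s" in every error is the apiserver calling the admission webhook while handling the kubelet's status patch, with ?timeout=10s mirroring the webhook's configured timeout. A sketch of the same call shape; against the expired serving certificate it fails during the TLS handshake with the x509 error quoted here:

package main

import (
	"bytes"
	"fmt"
	"net/http"
	"time"
)

func main() {
	// Client-side timeout chosen to match the ?timeout=10s in the log.
	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Post("https://127.0.0.1:9743/pod?timeout=10s",
		"application/json", bytes.NewReader([]byte(`{}`)))
	if err != nil {
		// With an expired serving cert this reports the same
		// "x509: certificate has expired or is not yet valid" failure.
		fmt.Println("webhook call failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("webhook status:", resp.Status)
}
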
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.734972 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.754116 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.766604 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.779505 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.783400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.783446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.783461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.783481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.783495 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.795078 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.826686 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d1
00236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.837990 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.849120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.868337 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.881274 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.885706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.885737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.885748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.885763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.885773 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.895887 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.908496 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.909806 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.910476 4747 scope.go:117] "RemoveContainer" 
containerID="9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79" Feb 02 08:57:09 crc kubenswrapper[4747]: E0202 08:57:09.910680 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.923476 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\
":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.950593 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e878
4399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8190ca30b223826b3968f3e9f38515ac1703bb689617509ed764a0a0262e74c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:03Z\\\",\\\"message\\\":\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string(nil), Routers:[]string{\\\\\\\"GR_crc\\\\\\\"}, Groups:[]string(nil)}, services.LB{Name:\\\\\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nI0202 08:57:03.282700 6044 services_controller.go:453] Built service default/kubernetes template LB for network=default: []services.LB{}\\\\nI0202 08:57:03.282715 6044 services_controller.go:454] Service default/kubernetes for network=default has 0 cluster-wide, 1 per-node configs, 0 template configs, making 0 (cluster) 2 (per node) and 0 (template) load balancers\\\\nF0202 08:57:03.282724 6044 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\
\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.964723 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.978029 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.987973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.988002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.988011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.988040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.988050 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:09Z","lastTransitionTime":"2026-02-02T08:57:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:09 crc kubenswrapper[4747]: I0202 08:57:09.991434 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:09Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.009725 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.024167 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.039045 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.055626 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.072600 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.091302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.091334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.091341 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.091378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.091388 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.106606 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.118479 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.128427 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.142872 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.166059 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.177454 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.186479 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.194549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.194582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.194592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.194608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.194619 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.197133 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.208917 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:10Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.296828 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.296863 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.296872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.296885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.296894 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.304229 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 15:13:16.586820266 +0000 UTC Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.338855 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.338883 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.338991 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:10 crc kubenswrapper[4747]: E0202 08:57:10.339052 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:10 crc kubenswrapper[4747]: E0202 08:57:10.339182 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:10 crc kubenswrapper[4747]: E0202 08:57:10.339300 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.398408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.398461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.398475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.398493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.398506 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.501276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.501317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.501340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.501367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.501382 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.604068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.604114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.604154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.604175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.604185 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.706768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.706843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.706867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.706902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.706927 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.761094 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:10 crc kubenswrapper[4747]: E0202 08:57:10.761340 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:10 crc kubenswrapper[4747]: E0202 08:57:10.761469 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:14.761441003 +0000 UTC m=+47.305779476 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.809440 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.809481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.809492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.809507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.809520 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.912877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.913007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.913024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.913043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:10 crc kubenswrapper[4747]: I0202 08:57:10.913058 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:10Z","lastTransitionTime":"2026-02-02T08:57:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.015010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.015057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.015066 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.015081 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.015091 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.117812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.117846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.117854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.117868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.117877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.220557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.220599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.220609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.220622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.220632 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.305324 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 04:27:48.825661133 +0000 UTC Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.323179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.323218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.323228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.323242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.323286 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.338454 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:11 crc kubenswrapper[4747]: E0202 08:57:11.338542 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.426090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.426138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.426150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.426167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.426179 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.528432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.528475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.528487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.528507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.528519 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.630372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.630415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.630425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.630438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.630448 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.732909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.732975 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.732987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.733038 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.733049 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.835314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.835352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.835363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.835378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.835389 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.938035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.938137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.938147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.938169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:11 crc kubenswrapper[4747]: I0202 08:57:11.938181 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:11Z","lastTransitionTime":"2026-02-02T08:57:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.040297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.040351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.040364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.040380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.040393 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.142846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.142885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.142896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.142912 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.142923 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.244827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.244885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.244895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.244908 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.244918 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.306213 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 06:26:15.88934139 +0000 UTC Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.338705 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.338746 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.338708 4747 util.go:30] "No sandbox for pod can be found. 
Feb 02 08:57:12 crc kubenswrapper[4747]: E0202 08:57:12.338924 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2"
Feb 02 08:57:12 crc kubenswrapper[4747]: E0202 08:57:12.338815 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 08:57:12 crc kubenswrapper[4747]: E0202 08:57:12.339192 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.347468 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.347554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.347574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.347599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.347616 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
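
Annotation: each util.go:30 / pod_workers.go:1301 pair above shows the same gate: the pod needs a new sandbox, but sync is skipped because the runtime still reports NetworkReady=false. Pods that share the host's network namespace are not blocked by this check, which is why control-plane static pods keep running while these three pods wait. A hedged Go sketch of the decision follows; the function and parameter names are illustrative, not kubelet's actual API.

package main

import (
    "errors"
    "fmt"
)

// networkReadyGate sketches the check behind "Error syncing pod, skipping":
// a pod that needs its own network namespace cannot get a sandbox until the
// runtime reports NetworkReady=true, while host-network pods may proceed.
func networkReadyGate(hostNetwork, runtimeNetworkReady bool) error {
    if hostNetwork || runtimeNetworkReady {
        return nil
    }
    return errors.New("network is not ready: container runtime network not ready: NetworkReady=false")
}

func main() {
    // network-metrics-daemon-t5t4m and the network-check pods use pod networking, so they wait:
    fmt.Println(networkReadyGate(false, false))
    // a host-network pod would sync regardless of CNI state:
    fmt.Println(networkReadyGate(true, false))
}
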
Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.450600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.450648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.450659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.450675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.450685 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.553302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.553358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.553374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.553398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.553420 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.656229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.656278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.656289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.656305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.656322 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.759406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.759481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.759493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.759518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.759531 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.861985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.862115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.862130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.862175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.862193 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.965344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.965387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.965399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.965582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:12 crc kubenswrapper[4747]: I0202 08:57:12.965594 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:12Z","lastTransitionTime":"2026-02-02T08:57:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.069122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.069163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.069174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.069187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.069197 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.172235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.172313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.172347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.172377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.172401 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.275056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.275096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.275109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.275144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.275155 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.306991 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 23:22:25.444483481 +0000 UTC Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.339369 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:13 crc kubenswrapper[4747]: E0202 08:57:13.339549 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.377420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.377465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.377480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.377501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.377522 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.481018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.481085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.481108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.481138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.481164 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.584714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.584755 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.584765 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.584782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.584793 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.686879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.686961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.686980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.687003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.687014 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.790405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.790464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.790475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.790494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.790506 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.893303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.893380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.893399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.893423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.893443 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.996136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.996199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.996212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.996231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:13 crc kubenswrapper[4747]: I0202 08:57:13.996242 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:13Z","lastTransitionTime":"2026-02-02T08:57:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.099535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.099593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.099610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.099630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.099644 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.203134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.203197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.203209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.203224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.203235 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.306035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.306079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.306089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.306105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.306116 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.307764 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 13:59:07.33565906 +0000 UTC Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.339172 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.339278 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.339322 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.339364 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.339508 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.339663 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.409167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.409262 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.409281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.409306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.409323 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.512115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.512157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.512170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.512188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.512201 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.614219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.614259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.614269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.614284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.614295 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.717501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.717548 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.717561 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.717624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.717643 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.789210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.789249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.789261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.789277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.789289 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.801996 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.802209 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.802310 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:22.802287461 +0000 UTC m=+55.346625934 (durationBeforeRetry 8s). 
Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.804752 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:14Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.808628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.808673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.808682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.808695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.808705 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.823230 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:14Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.827369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.827405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
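Every one of the repeated patch failures above has the same root cause: the serving certificate of the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-02-02. A minimal diagnostic sketch, not part of the kubelet, that reads that certificate's validity window; the address is taken from the log, and InsecureSkipVerify is used deliberately so an expired certificate can still be inspected:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address of the failing webhook call seen in the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	// The first peer certificate is the serving certificate the webhook presents.
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Same condition the x509 error in the log reports.
		fmt.Println("serving certificate has expired")
	}
}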
event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.827417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.827432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.827443 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.842121 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:14Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.845901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.845951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
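The payload the kubelet keeps resubmitting is a strategic merge patch against the Node object: the $setElementOrder/conditions directive pins the order of the conditions list, whose elements are merged by their "type" key, and the same sync also carries allocatable/capacity, the cached image list, and nodeInfo. A sketch reconstructing only the shape of that payload, hand-built with plain maps and trimmed to the Ready condition (the kubelet itself produces it with strategic-merge-patch helpers, not like this):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Field names and the directive key are copied from the log entry above;
	// images, capacity, and nodeInfo are omitted to keep the sketch short.
	patch := map[string]any{
		"status": map[string]any{
			// Keep the merged conditions list in this order; elements are
			// matched by their "type" key during the merge.
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{{
				"type":               "Ready",
				"status":             "False",
				"reason":             "KubeletNotReady",
				"lastHeartbeatTime":  "2026-02-02T08:57:14Z",
				"lastTransitionTime": "2026-02-02T08:57:14Z",
			}},
		},
	}
	b, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}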
event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.845960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.845971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.845980 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.860415 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:14Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.864658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.864702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.864715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.864732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.864745 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.876574 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:14Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:14 crc kubenswrapper[4747]: E0202 08:57:14.877019 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.878724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
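The terminal "exceeds retry count" entry is the bounded-retry pattern in kubelet_node_status.go: each status sync attempts the patch a fixed number of times (the upstream kubelet constant nodeStatusUpdateRetry, 5) and then gives up until the next sync interval. A simplified sketch of that loop; the patch function is a stand-in, and in this log every attempt fails with the same expired-certificate error:

package main

import (
	"errors"
	"fmt"
)

// Mirrors the upstream kubelet constant of the same name.
const nodeStatusUpdateRetry = 5

// Stand-in for the real PATCH call; here every attempt fails the same way,
// so the loop always exhausts its budget, as in the log above.
func patchNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
}

func main() {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return // success: nothing more to do until the next sync interval
	}
	fmt.Println("Unable to update node status: update node status exceeds retry count")
}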
event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.878876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.878998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.879083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.879156 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.982271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.982338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.982354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.982377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:14 crc kubenswrapper[4747]: I0202 08:57:14.982394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:14Z","lastTransitionTime":"2026-02-02T08:57:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.085245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.085286 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.085298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.085312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.085324 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:15Z","lastTransitionTime":"2026-02-02T08:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.187292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.187366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.187383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.187409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.187426 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:15Z","lastTransitionTime":"2026-02-02T08:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.290171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.290239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.290257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.290281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.290300 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:15Z","lastTransitionTime":"2026-02-02T08:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.308542 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 06:06:26.231041503 +0000 UTC Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.338456 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:15 crc kubenswrapper[4747]: E0202 08:57:15.338635 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
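
Note that each certificate_manager.go:356 line prints a different rotation deadline (2026-01-08 above; 2025-11-13, 2025-12-18 and 2025-11-07 further down), all already in the past relative to the 2026-02-02 clock: the kubelet's certificate manager re-derives a randomly jittered deadline inside the certificate's validity window on every evaluation, and keeps re-evaluating once that point has passed. A sketch of the calculation, assuming the deadline lands at a uniformly random point 70–90% of the way through the window (which matches the upstream manager's jitter) and an assumed one-year lifetime, which reproduces the spread of deadlines seen in this log:

// rotationdeadline.go: sketch of the kubelet certificate manager's jittered
// rotation deadline, assuming deadline = notBefore + lifetime*(0.7 + 0.2*rand).
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(lifetime) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiration consistent with the value printed above; the one-year
	// lifetime (hence notBefore) is an assumption.
	notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}
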
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.393633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.393687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.393704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.393725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.393738 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:15Z","lastTransitionTime":"2026-02-02T08:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.496816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.496867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.496882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.496901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:15 crc kubenswrapper[4747]: I0202 08:57:15.496913 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:15Z","lastTransitionTime":"2026-02-02T08:57:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} [... the same five-entry cycle — "Recording event message" for NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID and NodeNotReady, then the setters.go:603 "Node became not ready" condition — repeats at 08:57:15.600, 08:57:15.702, 08:57:15.805, 08:57:15.909, 08:57:16.012 and 08:57:16.114, identical except for timestamps; the final entry is truncated mid-message: ...] no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.218507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.218588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.218601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.218622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.218637 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:16Z","lastTransitionTime":"2026-02-02T08:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.308830 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 15:38:19.349660679 +0000 UTC Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.320568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.320603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.320611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.320623 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.320633 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:16Z","lastTransitionTime":"2026-02-02T08:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.339322 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:16 crc kubenswrapper[4747]: E0202 08:57:16.339531 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.339560 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.339599 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:16 crc kubenswrapper[4747]: E0202 08:57:16.339722 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:16 crc kubenswrapper[4747]: E0202 08:57:16.340653 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.423206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.423256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.423272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.423293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.423310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:16Z","lastTransitionTime":"2026-02-02T08:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.525586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.525621 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.525632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.525645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:16 crc kubenswrapper[4747]: I0202 08:57:16.525655 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:16Z","lastTransitionTime":"2026-02-02T08:57:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} [... the cycle repeats at 08:57:16.628, 08:57:16.731, 08:57:16.834, 08:57:16.937, 08:57:17.039 and 08:57:17.142, identical except for timestamps; the final entry is truncated mid-message: ...] no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.245851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.245917 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.245982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.246013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.246038 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:17Z","lastTransitionTime":"2026-02-02T08:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.309920 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 15:03:45.572615574 +0000 UTC Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.338789 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:17 crc kubenswrapper[4747]: E0202 08:57:17.339052 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.348606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.348666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.348676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.348694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:17 crc kubenswrapper[4747]: I0202 08:57:17.348707 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:17Z","lastTransitionTime":"2026-02-02T08:57:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} [... the cycle repeats at 08:57:17.451, 08:57:17.554, 08:57:17.657, 08:57:17.759, 08:57:17.862, 08:57:17.965, 08:57:18.068, 08:57:18.171 and 08:57:18.274, identical except for timestamps; the final entry is truncated mid-message: ...] no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.311206 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 21:46:06.413469354 +0000 UTC Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.338831 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.338837 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:18 crc kubenswrapper[4747]: E0202 08:57:18.339261 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.339289 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:18 crc kubenswrapper[4747]: E0202 08:57:18.339417 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:18 crc kubenswrapper[4747]: E0202 08:57:18.339604 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
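
The pod status patches below fail against the same expired webhook certificate as the node patches above; each error embeds the skew as "current time X is after Y". A Go sketch that extracts the first such pair from a saved copy of the log and reports how long past expiry the call was made (the regex is keyed to the exact wording of these errors; the file path is an assumption):

// certskew.go: pull the "current time X is after Y" timestamps out of a saved
// copy of this log and report how long the webhook certificate had been
// expired. The phrasing in the regex is copied from the errors above.
package main

import (
	"fmt"
	"os"
	"regexp"
	"time"
)

func main() {
	data, err := os.ReadFile("kubelet.log") // assumed local copy of this log
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	re := regexp.MustCompile(`current time ([0-9TZ:-]+) is after ([0-9TZ:-]+)`)
	m := re.FindStringSubmatch(string(data))
	if m == nil {
		fmt.Println("no expired-certificate webhook errors found")
		return
	}
	now, err1 := time.Parse(time.RFC3339, m[1])
	exp, err2 := time.Parse(time.RFC3339, m[2])
	if err1 != nil || err2 != nil {
		fmt.Fprintln(os.Stderr, "unparseable timestamps:", m[1], m[2])
		os.Exit(1)
	}
	fmt.Printf("certificate notAfter=%s; failing call made %s after expiry\n", m[2], now.Sub(exp))
}

For this log the answer is roughly 162 days (2025-08-24T17:21:41Z to 2026-02-02T08:57:18Z). The stream resumes:
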
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.362548 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.377280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.377312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.377322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.377336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.377347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.378564 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.395006 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.421650 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.435950 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.446415 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.457518 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.468380 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.480916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.480978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.480991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.481009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.481021 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.508481 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.528965 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.540277 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.554958 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.569088 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.582507 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.584346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.584556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.584637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.584732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.584792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.600526 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.623414 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] 
Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\
"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.633858 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:18Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.687884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.687980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.687994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.688015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.688055 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.791920 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.792052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.792077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.792108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.792129 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.894191 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.894223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.894233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.894246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.894256 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.997483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.997546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.997565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.997589 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:18 crc kubenswrapper[4747]: I0202 08:57:18.997607 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:18Z","lastTransitionTime":"2026-02-02T08:57:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.100821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.100916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.100983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.101022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.101043 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.203679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.203746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.203764 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.203787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.203804 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.306483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.306524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.306534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.306547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.306559 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.311858 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 17:39:17.544022493 +0000 UTC Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.338506 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:19 crc kubenswrapper[4747]: E0202 08:57:19.338647 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.408902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.408996 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.409023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.409051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.409072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.512461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.512529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.512551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.512578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.512603 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.615312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.615363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.615373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.615390 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.615401 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.718971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.719054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.719079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.719103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.719120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.821525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.821570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.821587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.821605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.821618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.924710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.924759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.924772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.924792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:19 crc kubenswrapper[4747]: I0202 08:57:19.924806 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:19Z","lastTransitionTime":"2026-02-02T08:57:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.027171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.027237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.027248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.027265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.027276 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.053139 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.053337 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:57:52.05330978 +0000 UTC m=+84.597648283 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.130615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.130664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.130675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.130690 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.130701 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.154260 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.154334 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.154396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.154433 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154429 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154497 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154496 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154516 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154549 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154575 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:52.154555275 +0000 UTC m=+84.698893728 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154581 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154598 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:52.154587496 +0000 UTC m=+84.698925949 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154606 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154688 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154702 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:52.154679658 +0000 UTC m=+84.699018121 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.154864 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:52.154825612 +0000 UTC m=+84.699164085 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.233545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.233619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.233634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.233651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.233662 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.312191 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 15:05:49.507671463 +0000 UTC Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.336692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.336750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.336769 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.336790 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.336806 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.339249 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.339314 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.339480 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.339614 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.339660 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:20 crc kubenswrapper[4747]: E0202 08:57:20.339799 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.440285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.440351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.440372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.440399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.440417 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.543445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.543517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.543541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.543570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.543593 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.647579 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.647644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.647662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.647691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.647716 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.750156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.750217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.750235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.750263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.750280 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.852663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.852716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.852728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.852746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.852758 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.955259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.955332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.955347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.955365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:20 crc kubenswrapper[4747]: I0202 08:57:20.955400 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:20Z","lastTransitionTime":"2026-02-02T08:57:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.057794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.057846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.057858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.057877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.057890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.160533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.160576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.160586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.160601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.160609 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.262985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.263032 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.263041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.263056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.263067 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.312568 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 09:09:19.528343334 +0000 UTC Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.339041 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:21 crc kubenswrapper[4747]: E0202 08:57:21.339262 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.365455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.365502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.365533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.365550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.365561 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.468095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.468151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.468161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.468175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.468183 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.571182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.571222 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.571234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.571250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.571262 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.673983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.674048 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.674066 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.674089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.674108 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.777018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.777077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.777102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.777125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.777144 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.879473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.879539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.879557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.879581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.879599 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.982453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.982494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.982502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.982515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:21 crc kubenswrapper[4747]: I0202 08:57:21.982524 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:21Z","lastTransitionTime":"2026-02-02T08:57:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.084853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.084908 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.084925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.084994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.085012 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.188046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.188104 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.188117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.188135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.188148 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.290846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.290888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.290897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.290911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.290922 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.313306 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 09:37:29.110017815 +0000 UTC Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.339110 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:22 crc kubenswrapper[4747]: E0202 08:57:22.339242 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.339669 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:22 crc kubenswrapper[4747]: E0202 08:57:22.339778 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.339683 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:22 crc kubenswrapper[4747]: E0202 08:57:22.340144 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.340388 4747 scope.go:117] "RemoveContainer" containerID="9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.393451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.393512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.393533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.393559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.393581 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.495626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.495656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.495664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.495677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.495685 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.598220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.598275 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.598285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.598303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.598315 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.701002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.701047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.701059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.701074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.701090 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.710739 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/1.log" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.712962 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.714072 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.726003 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd
323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.751084 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.768241 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.791418 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.803444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.803482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.803491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.803507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.803517 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.808659 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.822872 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.837268 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.850448 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.872170 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.877829 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.883479 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.885339 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:22 crc kubenswrapper[4747]: E0202 08:57:22.885484 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:22 crc kubenswrapper[4747]: E0202 08:57:22.885577 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:57:38.885555798 +0000 UTC m=+71.429894261 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.893285 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.905378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.905407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.905417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.905429 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.905437 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:22Z","lastTransitionTime":"2026-02-02T08:57:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.906709 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTim
e\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.909248 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.918510 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.929870 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.944597 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.963852 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.973693 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.984896 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:22 crc kubenswrapper[4747]: I0202 08:57:22.997476 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:22Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.007992 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.008026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.008036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.008048 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.008057 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.012367 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.030834 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.048895 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.059966 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.071132 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.088221 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.100684 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.111167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.111210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.111220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.111236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.111246 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.112321 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.126906 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.148049 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680b
adcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.159918 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.170829 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.182812 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.195480 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213149 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.213711 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.222832 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.313636 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 07:25:37.829173061 +0000 UTC Feb 02 
08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.317780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.317849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.317865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.317896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.317908 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.339521 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:23 crc kubenswrapper[4747]: E0202 08:57:23.339722 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.421167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.421238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.421250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.421267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.421279 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.523831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.523888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.523916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.523979 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.524005 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.626777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.627600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.627672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.627737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.627835 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.717595 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/2.log" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.718432 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/1.log" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.721248 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" exitCode=1 Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.721334 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.721389 4747 scope.go:117] "RemoveContainer" containerID="9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.722122 4747 scope.go:117] "RemoveContainer" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" Feb 02 08:57:23 crc kubenswrapper[4747]: E0202 08:57:23.722367 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.729716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.729792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.729806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.729826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.729844 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.737883 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.750196 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.766814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.779709 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.793134 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.806297 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.817071 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.827844 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.831805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.831838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.831846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.831860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.831872 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.849076 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.865485 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.877204 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.893198 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.905221 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.917316 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.929917 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.934216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.934259 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.934271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.934286 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.934297 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:23Z","lastTransitionTime":"2026-02-02T08:57:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.942495 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",
\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.962732 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267
223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set 
node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:23 crc kubenswrapper[4747]: I0202 08:57:23.973407 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.036636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.036675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.036685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.036701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.036712 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.139470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.139502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.139513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.139530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.139541 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.242037 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.242072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.242082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.242098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.242109 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.314709 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:57:34.506216173 +0000 UTC Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.338698 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.338827 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:24 crc kubenswrapper[4747]: E0202 08:57:24.338895 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:24 crc kubenswrapper[4747]: E0202 08:57:24.339280 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.339307 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:24 crc kubenswrapper[4747]: E0202 08:57:24.339478 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.345850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.345916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.345981 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.346014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.346037 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.448698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.448763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.448779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.448802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.448820 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.551300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.551362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.551376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.551391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.551425 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.653754 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.653815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.653832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.653855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.653873 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.688372 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.702352 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.715379 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.727820 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/2.log" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.730685 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 
08:57:24.732455 4747 scope.go:117] "RemoveContainer" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" Feb 02 08:57:24 crc kubenswrapper[4747]: E0202 08:57:24.732718 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.743849 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.755971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.756020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.756033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.756053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.756066 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.764222 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d1dd7f92e544886dc3b77b58bf005d81049e8784399b91d0e9302c09def0a79\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:04Z\\\",\\\"message\\\":\\\"snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.253:2379: 10.217.5.253:9979:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {de17f0de-cfb1-4534-bb42-c40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 08:57:04.493917 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-scheduler-operator/metrics\\\\\\\"}\\\\nI0202 08:57:04.494044 6212 services_controller.go:360] Finished syncing service metrics on namespace openshift-kube-scheduler-operator for network=default : 1.207451ms\\\\nI0202 08:57:04.494131 6212 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 08:57:04.494370 6212 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd/etcd\\\\\\\"}\\\\nI0202 08:57:04.494400 6212 services_controller.go:360] Finished syncing service etcd on namespace openshift-etcd for network=default : 1.887828ms\\\\nI0202 08:57:04.495025 6212 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 08:57:04.495057 6212 ovnkube.go:599] Stopped ovnkube\\\\nI0202 08:57:04.495074 6212 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 08:57:04.495125 6212 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.775084 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.787441 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.800608 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.813035 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.827131 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 
08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.839963 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.853897 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.858666 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.858752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.858768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.858792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.858808 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.865632 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.879882 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.899029 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa
3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731473
1ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.913560 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.926075 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.941187 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.956331 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.961080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.961127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.961144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.961169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.961187 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:24Z","lastTransitionTime":"2026-02-02T08:57:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.970305 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:24 crc kubenswrapper[4747]: I0202 08:57:24.985227 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.000810 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 
08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:24Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 
08:57:25.015481 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.027143 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.042662 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.054494 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.064169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.064210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.064222 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.064237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.064250 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.073522 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.084357 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.094145 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.110446 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.129060 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.131240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.131326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.131375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.131398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.131414 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.139809 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.144317 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.148153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.148181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.148190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.148202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.148210 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.152970 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.159051 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.162026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.162065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.162073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.162088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.162097 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.167515 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.173516 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.177543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.177580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.177594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.177611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.177623 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.180050 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.188453 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.191616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.191672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.191693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.191710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.191746 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.192662 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.202697 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:25Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.202826 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.204363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.204412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.204421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.204434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.204445 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.306839 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.306879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.306891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.306909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.306920 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.315922 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 14:11:39.660321016 +0000 UTC Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.339336 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:25 crc kubenswrapper[4747]: E0202 08:57:25.339490 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.409084 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.409140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.409151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.409168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.409181 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.512038 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.512142 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.512155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.512178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.512194 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.614570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.614665 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.614687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.614718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.614749 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.717255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.717310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.717329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.717349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.717365 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.820593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.820642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.820651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.820668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.820680 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.923586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.923642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.923658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.923679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:25 crc kubenswrapper[4747]: I0202 08:57:25.923696 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:25Z","lastTransitionTime":"2026-02-02T08:57:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.026445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.026476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.026487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.026500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.026514 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.128843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.128877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.128886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.128899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.128910 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.231498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.231531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.231541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.231554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.231564 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.316663 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 13:32:49.515367973 +0000 UTC Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.334053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.334088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.334096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.334111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.334122 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.338557 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:26 crc kubenswrapper[4747]: E0202 08:57:26.338670 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.338762 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:26 crc kubenswrapper[4747]: E0202 08:57:26.338847 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.338863 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:26 crc kubenswrapper[4747]: E0202 08:57:26.338924 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.436600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.436647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.436659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.436677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.436688 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.539849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.539897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.539912 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.539929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.539975 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.642647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.642686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.642696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.642710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.642719 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.744600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.744638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.744646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.744658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.744667 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.847502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.847556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.847566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.847581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.847592 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.949674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.949712 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.949721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.949738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:26 crc kubenswrapper[4747]: I0202 08:57:26.949757 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:26Z","lastTransitionTime":"2026-02-02T08:57:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.052677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.052719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.052730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.052761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.052776 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.154485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.154522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.154532 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.154544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.154554 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.256793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.256830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.256841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.256853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.256862 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.317233 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 20:13:19.959747244 +0000 UTC Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.339086 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:27 crc kubenswrapper[4747]: E0202 08:57:27.339249 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.358752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.358800 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.358812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.358837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.358850 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.461652 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.461719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.461740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.461768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.461788 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.565088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.565123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.565136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.565151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.565162 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.668053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.668116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.668140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.668169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.668190 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.771447 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.771483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.771493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.771508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.771517 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.874993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.875036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.875052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.875103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.875121 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.978437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.978545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.978579 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.978611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:27 crc kubenswrapper[4747]: I0202 08:57:27.978634 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:27Z","lastTransitionTime":"2026-02-02T08:57:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.081391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.081450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.081462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.081481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.081494 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.185615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.185660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.185672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.185688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.185699 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.288290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.288357 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.288378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.288405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.288425 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.317917 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 18:30:01.137641056 +0000 UTC Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.338971 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.339046 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:28 crc kubenswrapper[4747]: E0202 08:57:28.339093 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.339145 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:28 crc kubenswrapper[4747]: E0202 08:57:28.339327 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:28 crc kubenswrapper[4747]: E0202 08:57:28.339417 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.351209 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.366566 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.386315 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.391826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.391879 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.391901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.391929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.391976 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.399829 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",
\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.424959 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267
223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.442822 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.455048 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.467464 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.478549 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.491721 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 
08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.494850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.494882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.494894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc 
kubenswrapper[4747]: I0202 08:57:28.494914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.494926 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.504490 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.518186 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.529278 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.538483 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.557985 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.568353 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.577073 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.594595 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:28Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.596857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.596911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc 
kubenswrapper[4747]: I0202 08:57:28.596924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.596962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.596978 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.699478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.699520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.699531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.699545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.699557 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.803414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.803506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.804129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.804235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.804303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.909205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.909592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.909698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.909780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:28 crc kubenswrapper[4747]: I0202 08:57:28.909873 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:28Z","lastTransitionTime":"2026-02-02T08:57:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.013648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.013908 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.014072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.014153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.014240 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.116660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.116689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.116697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.116710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.116718 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.219748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.219788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.219799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.219816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.219832 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.318384 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 03:44:46.598300885 +0000 UTC Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.322842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.322925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.322987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.323017 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.323040 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.339427 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:29 crc kubenswrapper[4747]: E0202 08:57:29.339664 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.426492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.426559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.426571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.426588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.426598 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.528869 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.528958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.528972 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.528990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.529003 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.631911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.632008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.632026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.632051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.632069 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.734415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.734474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.734500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.734528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.734550 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.837914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.838004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.838021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.838043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.838058 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.941328 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.941404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.941427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.941457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:29 crc kubenswrapper[4747]: I0202 08:57:29.941478 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:29Z","lastTransitionTime":"2026-02-02T08:57:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.044492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.044558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.044575 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.044598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.044616 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.147263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.147318 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.147336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.147361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.147380 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.249534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.249573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.249583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.249598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.249610 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.318829 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 21:06:56.002815342 +0000 UTC Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.339260 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.339302 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:30 crc kubenswrapper[4747]: E0202 08:57:30.339420 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:30 crc kubenswrapper[4747]: E0202 08:57:30.339507 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.339819 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:30 crc kubenswrapper[4747]: E0202 08:57:30.340000 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.352688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.352727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.352736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.352751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.352760 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.455331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.455381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.455395 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.455417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.455433 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.558425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.558484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.558506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.558531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.558548 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.660593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.660641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.660656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.660677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.660693 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.763554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.763606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.763621 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.763640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.763653 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.866602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.866680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.866707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.866737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.866759 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.969726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.969767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.969782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.969815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:30 crc kubenswrapper[4747]: I0202 08:57:30.969835 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:30Z","lastTransitionTime":"2026-02-02T08:57:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.072800 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.072841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.072853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.072867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.072877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.176265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.176299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.176308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.176324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.176337 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.279655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.279699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.279711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.279736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.279750 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.319874 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 05:26:44.878426101 +0000 UTC Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.339417 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:31 crc kubenswrapper[4747]: E0202 08:57:31.339593 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.382985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.383020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.383033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.383053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.383067 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.485695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.485758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.485775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.485810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.485825 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.589236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.589277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.589289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.589307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.589319 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.691507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.691559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.691572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.691590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.691604 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.794124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.794152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.794161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.794173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.794181 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.897133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.897182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.897199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.897216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:31 crc kubenswrapper[4747]: I0202 08:57:31.897228 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:31Z","lastTransitionTime":"2026-02-02T08:57:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.000981 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.001029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.001040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.001065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.001079 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.103513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.103542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.103551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.103565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.103577 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.206263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.206315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.206331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.206352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.206368 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.309099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.309141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.309154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.309172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.309184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.320843 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 21:27:10.079725763 +0000 UTC Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.339217 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:32 crc kubenswrapper[4747]: E0202 08:57:32.339338 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.339422 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:32 crc kubenswrapper[4747]: E0202 08:57:32.339658 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.339756 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:32 crc kubenswrapper[4747]: E0202 08:57:32.339905 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.411619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.411682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.411694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.411725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.411737 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.514194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.514225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.514239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.514257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.514269 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.616534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.616574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.616583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.616598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.616609 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.719865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.719909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.719919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.719978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.720001 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.822469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.822537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.822549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.822569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.822580 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.924768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.924796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.924804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.924817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:32 crc kubenswrapper[4747]: I0202 08:57:32.924826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:32Z","lastTransitionTime":"2026-02-02T08:57:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.026756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.026786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.026793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.026804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.026812 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.129606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.129640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.129648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.129660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.129670 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.232411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.232465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.232483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.232505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.232521 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.321872 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 14:52:05.070795181 +0000 UTC
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.334604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.334634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.334644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.334656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.334665 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.339238 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:57:33 crc kubenswrapper[4747]: E0202 08:57:33.339406 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.353996 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.437868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.437911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.437922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.437955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.437966 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
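The certificate_manager.go line above shows the kubelet scheduling rotation of its kubelet-serving certificate well ahead of the 2026-02-24 expiry. client-go's certificate manager places the rotation deadline at a randomly jittered point, approximately 70–90% of the certificate's validity window, which is why the logged deadline differs on each recomputation (2025-12-05 here, 2025-11-24 one second later below). A minimal Go sketch of that jitter; the issuance date is an assumption, since the log records only the expiry:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline mirrors the jitter scheme used by client-go's
// certificate manager: rotation is attempted at a uniformly random point
// between roughly 70% and 90% of the certificate's validity window.
// Sketch only; the exact constants are taken from memory of client-go.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// notAfter matches the expiry in the log; notBefore is assumed.
	notBefore := time.Date(2025, 8, 26, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
}
```

The jitter spreads rotation attempts out in time so that a fleet of kubelets whose certificates were issued together does not hammer the signing API simultaneously.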
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.540296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.540380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.540396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.540419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.540432 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.643266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.643301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.643313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.643329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.643339 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.745672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.745711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.745719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.745750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.745765 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.847844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.847884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.847893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.847906 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.847915 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.950547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.950589 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.950601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.950620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:33 crc kubenswrapper[4747]: I0202 08:57:33.950630 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:33Z","lastTransitionTime":"2026-02-02T08:57:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.053387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.053423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.053432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.053445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.053454 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.156223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.156271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.156280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.156297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.156308 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.258455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.258493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.258507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.258520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.258532 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.322249 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 08:02:53.793724108 +0000 UTC
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.338622 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.338701 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 08:57:34 crc kubenswrapper[4747]: E0202 08:57:34.338751 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2"
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:34 crc kubenswrapper[4747]: E0202 08:57:34.338872 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.339034 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:34 crc kubenswrapper[4747]: E0202 08:57:34.339196 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.364016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.364076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.364088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.364105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.364122 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.466528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.466583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.466596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.466617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.466633 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.569806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.569868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.569884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.569903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.569917 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.672728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.672767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.672777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.672814 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.672824 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.775023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.775076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.775087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.775105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.775118 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.878079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.878130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.878147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.878175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.878202 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.980918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.980982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.980994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.981010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:34 crc kubenswrapper[4747]: I0202 08:57:34.981024 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:34Z","lastTransitionTime":"2026-02-02T08:57:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.083247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.083288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.083297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.083312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.083322 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.186300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.186385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.186411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.186443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.186468 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.206541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.206585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.206600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.206618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.206632 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.221468 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:35Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.226134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.226200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
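The status patch above is rejected not by the API server itself but by the node.network-node-identity.openshift.io admission webhook, whose serving certificate expired on 2025-08-24T17:21:41Z, long before the node's clock time of 2026-02-02. The failing check is the standard x509 validity-window comparison, which can be reproduced against any PEM-encoded certificate; in this Go sketch the file path is a placeholder:

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-serving.crt") // placeholder path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// The branch the log is hitting: "certificate has expired ...
		// current time <now> is after <NotAfter>".
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Println("not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Because the kubelet retries the patch ("will retry"), the identical payloads recur below at 08:57:35.243526 and 08:57:35.259394; none can land until the webhook certificate is rotated.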
event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.226211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.226243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.226258 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.243526 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:35Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.247689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.247724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.247732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.247746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.247757 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.259394 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:35Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.263194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.263442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.263460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.263485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.263506 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.276264 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:35Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.279505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.279542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.279553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.279568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.279579 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.290028 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:35Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.290139 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.291309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.291340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.291352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.291366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.291381 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.323152 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 05:37:23.962348798 +0000 UTC Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.338679 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.339131 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.339302 4747 scope.go:117] "RemoveContainer" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" Feb 02 08:57:35 crc kubenswrapper[4747]: E0202 08:57:35.339436 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.394346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.394393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.394404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.394421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.394432 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.497108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.497160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.497172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.497190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.497202 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.599231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.599288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.599301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.599317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.599326 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.701908 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.701961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.701972 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.701987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.702000 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.804417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.804456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.804467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.804481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.804490 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.906982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.907050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.907063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.907079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:35 crc kubenswrapper[4747]: I0202 08:57:35.907092 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:35Z","lastTransitionTime":"2026-02-02T08:57:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.010203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.010265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.010282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.010307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.010324 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.114292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.114355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.114367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.114386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.114400 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.217850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.217903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.217913 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.217930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.217960 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.320919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.321024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.321037 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.321076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.321087 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.324313 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 17:26:12.3088688 +0000 UTC Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.338950 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.338985 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:36 crc kubenswrapper[4747]: E0202 08:57:36.339085 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.339143 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:36 crc kubenswrapper[4747]: E0202 08:57:36.339257 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:36 crc kubenswrapper[4747]: E0202 08:57:36.339440 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.423344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.423378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.423389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.423404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.423415 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.526276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.526312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.526323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.526338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.526351 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.628868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.628912 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.628920 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.628958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.628970 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.731353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.731392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.731403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.731418 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.731429 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.834003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.834052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.834063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.834077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.834086 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.936139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.936176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.936203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.936218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:36 crc kubenswrapper[4747]: I0202 08:57:36.936251 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:36Z","lastTransitionTime":"2026-02-02T08:57:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.038207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.038249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.038259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.038276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.038290 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.139771 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.139809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.139818 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.139831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.139840 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.242561 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.242607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.242617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.242633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.242645 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.324840 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 16:25:36.852646057 +0000 UTC Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.338598 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:37 crc kubenswrapper[4747]: E0202 08:57:37.338709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.345373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.345432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.345452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.345518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.345543 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.448827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.448864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.448876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.448892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.448904 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.550636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.550678 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.550692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.550706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.550717 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.652746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.652788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.652800 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.652816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.652826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.755113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.755155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.755165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.755182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.755195 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.857632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.857672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.857683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.857699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.857712 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.959894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.959951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.959963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.959976 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:37 crc kubenswrapper[4747]: I0202 08:57:37.959986 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:37Z","lastTransitionTime":"2026-02-02T08:57:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.062821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.063176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.063196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.063213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.063224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.165193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.165236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.165248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.165262 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.165273 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.266969 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.267006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.267015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.267028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.267037 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.325450 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 03:33:43.458215927 +0000 UTC Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.338748 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.338821 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:38 crc kubenswrapper[4747]: E0202 08:57:38.338883 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:38 crc kubenswrapper[4747]: E0202 08:57:38.338967 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.339071 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:38 crc kubenswrapper[4747]: E0202 08:57:38.339213 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.350559 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.363527 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.370604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.370646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.370658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.370676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.370712 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.376356 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.387213 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.396485 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.406043 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.425550 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.436329 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.445426 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.458863 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.473288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.473314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc 
kubenswrapper[4747]: I0202 08:57:38.473322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.473336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.473344 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.478158 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.491207 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.503331 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.518562 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.531626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.550740 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99878
0f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.562857 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.573836 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89bc2250-c3d3-4bb8-a88f-885badc62a54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.575352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.575375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.575386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.575400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.575411 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.585661 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:38Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.677431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.677465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.677476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.677492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.677502 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.779381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.779417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.779427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.779441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.779452 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.882154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.882183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.882192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.882204 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.882215 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.953788 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:38 crc kubenswrapper[4747]: E0202 08:57:38.953904 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:38 crc kubenswrapper[4747]: E0202 08:57:38.953978 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:58:10.953961969 +0000 UTC m=+103.498300392 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.985315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.985354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.985363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.985376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:38 crc kubenswrapper[4747]: I0202 08:57:38.985388 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:38Z","lastTransitionTime":"2026-02-02T08:57:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.087970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.088024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.088035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.088049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.088058 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.191639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.191890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.192028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.192113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.192179 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.294415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.294446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.294457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.294472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.294484 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.326212 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 05:19:26.003452394 +0000 UTC Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.338586 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:39 crc kubenswrapper[4747]: E0202 08:57:39.338721 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.397180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.397230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.397247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.397272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.397290 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.499895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.499984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.500006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.500028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.500044 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.604723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.605121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.605139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.605158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.605168 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.707948 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.707996 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.708011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.708028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.708039 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.809756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.809804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.809816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.809833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.809844 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.911929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.911988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.911999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.912014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:39 crc kubenswrapper[4747]: I0202 08:57:39.912025 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:39Z","lastTransitionTime":"2026-02-02T08:57:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.014587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.014630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.014642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.014657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.014669 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.118138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.118189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.118199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.118216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.118226 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.220645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.220677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.220686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.220698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.220707 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.323268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.323313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.323322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.323336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.323347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.326553 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 14:11:39.722355796 +0000 UTC Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.338892 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.338964 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.338892 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:40 crc kubenswrapper[4747]: E0202 08:57:40.339021 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:40 crc kubenswrapper[4747]: E0202 08:57:40.339118 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:40 crc kubenswrapper[4747]: E0202 08:57:40.339201 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.425738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.425766 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.425776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.425793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.425804 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.528178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.528242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.528252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.528268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.528278 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.631079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.631132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.631143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.631161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.631171 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.733961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.734010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.734025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.734046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.734065 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.782355 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/0.log" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.782413 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" containerID="feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c" exitCode=1 Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.782457 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerDied","Data":"feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.782927 4747 scope.go:117] "RemoveContainer" containerID="feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.804586 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c
4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.817568 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.829573 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.837291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.837337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.837353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.837377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.837394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.845665 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb
6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.860122 4747 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"2026-02-02T08:56:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e\\\\n2026-02-02T08:56:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e to /host/opt/cni/bin/\\\\n2026-02-02T08:56:55Z [verbose] multus-daemon started\\\\n2026-02-02T08:56:55Z [verbose] Readiness Indicator file check\\\\n2026-02-02T08:57:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.878242 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.890026 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.905065 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89bc2250-c3d3-4bb8-a88f-885badc62a54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.918715 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.931805 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.939844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.939910 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.939924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.939963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.939980 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:40Z","lastTransitionTime":"2026-02-02T08:57:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.946139 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.959854 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.971913 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.982879 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:40 crc kubenswrapper[4747]: I0202 08:57:40.993440 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:40Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.009165 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.022517 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.035277 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.042261 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.042314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.042326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.042349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.042364 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.045247 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.145000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.145038 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.145046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.145060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.145070 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.247593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.247643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.247659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.247684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.247701 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.327260 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 10:23:38.509580974 +0000 UTC Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.338877 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:41 crc kubenswrapper[4747]: E0202 08:57:41.339013 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.349121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.349143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.349154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.349167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.349178 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.452044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.452085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.452093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.452108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.452120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.555140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.555185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.555196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.555213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.555224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.657137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.657182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.657193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.657212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.657224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.759732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.759824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.759837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.759857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.759869 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.788543 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/0.log" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.788611 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerStarted","Data":"2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.803251 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.818766 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.833688 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.846217 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.861827 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.863424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.863556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.863620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.863688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.863754 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.875978 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.887974 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.898627 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.917342 4747 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert
-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\
\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.928299 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.937158 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.950984 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.963815 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"2026-02-02T08:56:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e\\\\n2026-02-02T08:56:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e to /host/opt/cni/bin/\\\\n2026-02-02T08:56:55Z [verbose] multus-daemon started\\\\n2026-02-02T08:56:55Z [verbose] Readiness Indicator file check\\\\n2026-02-02T08:57:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.966609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.966631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.966640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.966654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.966663 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:41Z","lastTransitionTime":"2026-02-02T08:57:41Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.984032 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:41 crc kubenswrapper[4747]: I0202 08:57:41.996149 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:41Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.005775 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89bc2250-c3d3-4bb8-a88f-885badc62a54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:42Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.016431 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:42Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.027270 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:42Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.039020 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:42Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.068693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.068728 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.068738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.068752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.068761 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.171197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.171237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.171246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.171258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.171269 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.273814 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.273859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.273870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.273888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.273900 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.328425 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 18:04:39.415380245 +0000 UTC Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.339105 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.339105 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:42 crc kubenswrapper[4747]: E0202 08:57:42.339310 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:42 crc kubenswrapper[4747]: E0202 08:57:42.339353 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.339141 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:42 crc kubenswrapper[4747]: E0202 08:57:42.339467 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.376215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.376294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.376319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.376343 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.376365 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.478268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.478314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.478324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.478343 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.478353 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.581321 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.581365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.581377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.581394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.581405 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.683847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.683915 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.683986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.684022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.684047 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.788507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.788756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.788905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.789233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.789363 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.892064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.892287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.892455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.892565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.892659 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.995467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.995501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.995510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.995523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:42 crc kubenswrapper[4747]: I0202 08:57:42.995544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:42Z","lastTransitionTime":"2026-02-02T08:57:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.098289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.098336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.098353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.098377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.098395 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.201804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.202067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.202154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.202305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.202416 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.305293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.305703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.305918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.306135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.306491 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.329613 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 23:02:20.409069429 +0000 UTC Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.339089 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:43 crc kubenswrapper[4747]: E0202 08:57:43.339551 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.409779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.410337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.410588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.410824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.411096 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.515376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.515431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.515441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.515459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.515470 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.617602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.617641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.617652 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.617669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.617681 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.720978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.721042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.721054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.721073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.721119 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.823631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.823686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.823703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.823725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.823743 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.926146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.926185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.926196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.926210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:43 crc kubenswrapper[4747]: I0202 08:57:43.926219 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:43Z","lastTransitionTime":"2026-02-02T08:57:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.028782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.028830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.028840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.028853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.028863 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.131430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.131499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.131522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.131551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.131571 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.235023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.235113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.235136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.235165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.235187 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.330089 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 03:33:39.949007471 +0000 UTC Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.337821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.337860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.337871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.337886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.337898 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.338435 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.338533 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:44 crc kubenswrapper[4747]: E0202 08:57:44.338636 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.338669 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:44 crc kubenswrapper[4747]: E0202 08:57:44.338713 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:44 crc kubenswrapper[4747]: E0202 08:57:44.338815 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.439832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.439873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.439883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.439899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.439910 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.542500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.542535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.542550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.542570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.542584 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.645295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.645385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.645396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.645411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.645421 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.747673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.747723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.747737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.747753 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.747783 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.850034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.850077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.850086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.850102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.850113 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.952809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.952869 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.952888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.952909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:44 crc kubenswrapper[4747]: I0202 08:57:44.952923 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:44Z","lastTransitionTime":"2026-02-02T08:57:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.056695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.056746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.056776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.056794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.056804 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.159138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.159219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.159244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.159278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.159301 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.262310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.262343 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.262351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.262387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.262397 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.330606 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 21:49:23.775579601 +0000 UTC Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.339065 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.339281 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.365046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.365112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.365132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.365158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.365177 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.467990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.468062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.468085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.468114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.468135 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.498305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.498367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.498385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.498412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.498431 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.521702 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.526401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.526441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.526451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.526463 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.526473 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.542679 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.548063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.548112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.548122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.548139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.548148 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.560094 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.564276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.564332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.564347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.564369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.564384 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.579156 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.583210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.583377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.583492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.583625 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.583782 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.598024 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4171cdc0-0933-45c6-9d27-161671337117\\\",\\\"systemUUID\\\":\\\"d70abc6b-ab9b-46ee-8b6c-2747d8bea427\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:45 crc kubenswrapper[4747]: E0202 08:57:45.598159 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.600404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
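[editor's note: every retry above fails the same x509 validity-window check against the serving certificate of the node.network-node-identity webhook on 127.0.0.1:9743, whose NotAfter (2025-08-24T17:21:41Z) is long before the node's clock (2026-02-02). Below is a minimal Go sketch of that comparison, not the kubelet's actual code path; the certificate path is hypothetical and stands in for a PEM copy of the webhook's serving certificate.]

// certcheck.go - minimal sketch of the validity-window check behind
// "x509: certificate has expired or is not yet valid".
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point it at the webhook's serving certificate.
	pemBytes, err := os.ReadFile("/path/to/webhook-serving.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read cert:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, "parse cert:", err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\nNow:       %s\n",
		cert.NotBefore.UTC().Format(time.RFC3339),
		cert.NotAfter.UTC().Format(time.RFC3339),
		now.Format(time.RFC3339))
	// The same window comparison crypto/x509 applies during verification:
	// a clock after NotAfter fails, exactly as logged
	// (2026-02-02T08:57:45Z is after 2025-08-24T17:21:41Z).
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Println("INVALID: current time is outside the certificate validity window")
		os.Exit(2)
	}
	fmt.Println("OK: certificate is within its validity window")
}

[editor's note: running this against the certificate served on 127.0.0.1:9743 would confirm the expiry the kubelet keeps reporting; until that certificate is rotated, every node-status patch will keep failing as above.]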
event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.600442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.600454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.600476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.600488 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.703577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.703625 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.703639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.703662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.703676 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.806100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.806163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.806177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.806205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.806224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.909784 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.909872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.909891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.909917 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:45 crc kubenswrapper[4747]: I0202 08:57:45.910002 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:45Z","lastTransitionTime":"2026-02-02T08:57:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.013017 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.013073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.013090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.013112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.013130 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.115703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.115757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.115769 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.115785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.115796 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.218603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.218674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.218710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.218740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.218763 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.322043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.322162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.322257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.322280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.322338 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.340336 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 12:14:10.740485177 +0000 UTC Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.340481 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.340551 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:46 crc kubenswrapper[4747]: E0202 08:57:46.340586 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.340617 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:46 crc kubenswrapper[4747]: E0202 08:57:46.340709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:46 crc kubenswrapper[4747]: E0202 08:57:46.340785 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.425676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.425775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.425789 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.425813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.425830 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.528750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.528794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.528806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.528824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.528838 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.632316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.632377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.632387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.632402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.632412 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.735528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.735588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.735615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.735641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.735849 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.839686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.839741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.839753 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.839774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.839789 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.942099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.942161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.942174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.942193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:46 crc kubenswrapper[4747]: I0202 08:57:46.942205 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:46Z","lastTransitionTime":"2026-02-02T08:57:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.044159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.044191 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.044236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.044253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.044264 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.146831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.146903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.146918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.146959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.146971 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.249103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.249176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.249192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.249213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.249227 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.339095 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:47 crc kubenswrapper[4747]: E0202 08:57:47.339271 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.341254 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 21:02:48.53493014 +0000 UTC Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.351599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.351640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.351650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.351662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.351672 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.454291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.454349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.454364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.454387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.454402 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.556550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.556593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.556604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.556619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.556632 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.659748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.659817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.659852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.659879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.659900 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.762386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.762416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.762425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.762440 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.762451 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.865130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.865173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.865183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.865203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.865215 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.967660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.967910 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.967919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.967960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:47 crc kubenswrapper[4747]: I0202 08:57:47.967970 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:47Z","lastTransitionTime":"2026-02-02T08:57:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.070892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.070981 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.071000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.071024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.071044 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.174025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.174073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.174092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.174143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.174199 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.276211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.276258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.276269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.276285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.276298 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.338915 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.339007 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.339144 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:48 crc kubenswrapper[4747]: E0202 08:57:48.339290 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:48 crc kubenswrapper[4747]: E0202 08:57:48.339405 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:48 crc kubenswrapper[4747]: E0202 08:57:48.339595 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.341981 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 10:12:20.60293946 +0000 UTC Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.357095 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d347
20243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.373196 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.378797 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.378846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.378882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.378907 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.378923 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.389303 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.408473 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.424114 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.435367 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.446072 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.456871 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 
08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.476125 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z"
Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.481160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.481208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.481221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.481238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.481253 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.488142 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.499089 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.513523 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.523745 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.534161 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89bc2250-c3d3-4bb8-a88f-885badc62a54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.547631 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.560636 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.580462 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.584421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.584449 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.584460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.584474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.584485 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.595428 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"2026-02-02T08:56:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e\\\\n2026-02-02T08:56:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e to /host/opt/cni/bin/\\\\n2026-02-02T08:56:55Z [verbose] multus-daemon started\\\\n2026-02-02T08:56:55Z [verbose] Readiness Indicator file check\\\\n2026-02-02T08:57:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.627633 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:48Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.686815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.686852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.686862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.686877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.686888 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.789112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.789151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.789161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.789175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.789185 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.891138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.891175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.891184 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.891197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.891205 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.993174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.993232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.993247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.993269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:48 crc kubenswrapper[4747]: I0202 08:57:48.993293 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:48Z","lastTransitionTime":"2026-02-02T08:57:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.096213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.096257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.096266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.096283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.096294 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.198822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.198879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.198891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.198909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.198931 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.301374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.301414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.301425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.301441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.301453 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.339167 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:49 crc kubenswrapper[4747]: E0202 08:57:49.339617 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.339806 4747 scope.go:117] "RemoveContainer" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.342162 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 08:04:44.525826012 +0000 UTC Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.403820 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.403850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.403858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.403870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.403880 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.506969 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.507352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.507367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.507384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.507397 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.609862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.609896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.609904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.609919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.609929 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.712613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.712653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.712665 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.712681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.712692 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.814665 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.814723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.814733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.814758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.814779 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.816206 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/2.log" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.820011 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.820532 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.880805 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5ffc87-756b-441c-8001-8bcc1def73cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75481accdd956d7a5b8656018a14e384fcc53278b34028b5ca9a497ffe1053f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e8478ee360027256de16a8eca43aba389b44bba7a428914dd692107e231cf07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gq97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hfvzj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.901082 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff9b1fbe-12b3-49da-87e7-85c10ac955fc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{
\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T08:56:47Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 08:56:47.613109 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 08:56:47.613437 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 08:56:47.614676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2512815481/tls.crt::/tmp/serving-cert-2512815481/tls.key\\\\\\\"\\\\nI0202 08:56:47.933272 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 08:56:47.937085 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 08:56:47.937100 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 08:56:47.937119 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 08:56:47.937124 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 08:56:47.942827 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0202 08:56:47.942878 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942890 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 08:56:47.942903 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 08:56:47.942911 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nI0202 08:56:47.942892 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 08:56:47.942918 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 08:56:47.942989 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 08:56:47.945400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.916742 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ffd306e2a09f2806255757cb0fce719443a73d0476e0b905f4baf43a76f9fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://807982cd9965cba6e0497bdfd90bdff7ada5c61a2c1fb547bd020f2d643840a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.917202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.917232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.917243 4747 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.917259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.917271 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:49Z","lastTransitionTime":"2026-02-02T08:57:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.931150 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48061e404ac6ab208cb9802145fb5cc37a92ec6b4a3fb2a14f4d0930a551b86c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b92ss\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g8f8b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.946622 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-v96w7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"10256a9d-49d9-40ff-ba5d-dd4a6a372593\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdbdf012841332b62e9fe70b22155e3d5929ddf6ee42ad66e39ee0fbf8672fd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cf78p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:55Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-v96w7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 
2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.964514 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fac17015-5b1a-466e-b7bb-aec8fa867d5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7df42aca08b4babd7acefeb8eb55ebd39bdfdca86b2bdf00df51d26740f5fd70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed2f02e27a1933724720d60ae65595cdfb6ebea17cde9159ee3b0c54c66d239e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f1847887f0b40f2bce0accc61d100236b290f44f11d3ae9310195f58c341e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823ffc27b120000cc1a440189d9134724e5680badcf88353d7867062523f3ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1cb92ee8e0e93c9ba09f7c62a486356a6bef0dceacee0170626249243f87a3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e308a0f0df5a03b3fa010994d0cfbc9d423883691c4f7db67d63bce7d4e884b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20b88158ba280ae9f55106170a11a4a46e4eea52cb9f23adc3a2a9eb66da21a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-02-02T08:56:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe3c426d18071de9592cfff10d19c363e461ccff3830245d9ebedafdd4fe8a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.975283 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c098e2b3f27f11d3ea5e57ec3cb5c0458a0ab9b6b26066341db741c2ba9f6403\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:49 crc kubenswrapper[4747]: I0202 08:57:49.985626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gjwhc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1c5e693-ed04-4480-8159-91dc592b7edb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf50e88ecd1cdafc5694d1852edd80c0f2a55538a58ea7f64e4eb07de39531a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vpz7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gjwhc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:49Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.002528 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"313d57a3-8403-49fb-b300-c82ec66f4339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a2a03dcab1082995c4fd823a47d06d0172447d1f6e6dc54ddd533498cdd282\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0db790f07958054ee54a0684de3e8c021d84b88d2b18a55cd870fd4c5838881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccf977cb6de223e4c373337a165ba91b6e88561f6948cf95045b6eb8fc373068\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ede38e2a053f40549ebaa4af213ea02388a640036982ff3804db4e020183db4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55098592f6054a260d82c3562103c92c4da97c444b654bc6c5a9cdd283afa0c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c93357ce85f129455ba42bb073d0728f1f50bf8afea340503bd9e85a06a54d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6f4fa5a857e01aded00d388ec9ea2cb2c192bd5344a03a3b15608a38e41ec5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:57:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bx6b6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vjrcp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.016064 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvnm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b1945e48-8aba-4a55-8dce-18e4a87ce4c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:40Z\\\",\\\"message\\\":\\\"2026-02-02T08:56:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e\\\\n2026-02-02T08:56:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8db56637-bc51-4a51-8bab-71040ee04f9e to /host/opt/cni/bin/\\\\n2026-02-02T08:56:55Z [verbose] multus-daemon started\\\\n2026-02-02T08:56:55Z [verbose] Readiness Indicator file check\\\\n2026-02-02T08:57:40Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg9c5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvnm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.019778 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.019812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.019822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.019837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.019848 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.044335 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3dfe801-a30e-4352-bec1-869e46ad5f0a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T08:57:23Z\\\",\\\"message\\\":\\\"Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0202 08:57:23.250166 6446 services_controller.go:452] Built service openshift-ingress/router-internal-default per-node LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250176 6446 services_controller.go:453] Built service openshift-ingress/router-internal-default template LB for network=default: []services.LB{}\\\\nI0202 08:57:23.250183 6446 services_controller.go:454] Service openshift-ingress/router-internal-default for network=default has 3 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0202 08:57:23.250199 6446 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:23Z is after 2025-08-24T17:21:41Z]\\\\nI0202 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T08:57:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:57:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kc4tt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7782\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.055641 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83246640-90cc-4bd6-b508-9e2ebdcda8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wtnrz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:57:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t5t4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.066473 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89bc2250-c3d3-4bb8-a88f-885badc62a54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4559c8937d2ea98cda42aba4464c3a8841f5d782a7d1607c94c49d0d48c6c006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a88b2fcd0847343d78b62806681226f21740b89d1d1c48cf14db4a97552471fc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.079772 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5e5c8830-76cd-47e5-933e-7dfb4d22a093\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:57:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39e2ed9559984e3211beca8774d6e01e7099ba9ed1873138f515ad4afb04cfbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4b8c16629a17ad50798d4aea28563e2e6c21c7c57fb972062b0f01ef158e20e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://26f9892c180b137cb57d3e63754637b9f181728e13c1ecb847ed8e77c7fff4c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c819c0b26af54c072c943a35542b683fde972ed1b053c88455b9cd4c80a516a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T08:56:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.093281 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.106183 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:49Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00321b04021451388af9cffa01c9564638828c82a63e73e3754cc9b83283862a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.119409 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e65e0102-9794-43a9-af2a-41bf5520b4e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fcee6276fccc1c2f2c5befda62f8e856b9ab9308ed251d9fb7ed48dabb008c48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9239143b53c9b8b8becd323262881f210a1f3f17cb808354da5078920445177f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54a09638f4aaa0a3fdf11c6b5917032684f7d933cd6274d9887160b30e0ef234\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T08:56:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T08:56:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.123201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.123245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.123256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.123274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.123286 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.137456 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.150306 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T08:56:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T08:57:50Z is after 2025-08-24T17:21:41Z" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.225993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.226046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.226058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.226074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.226085 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.328342 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.328381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.328394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.328411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.328423 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.339095 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.339230 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.339317 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:50 crc kubenswrapper[4747]: E0202 08:57:50.339654 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:50 crc kubenswrapper[4747]: E0202 08:57:50.339743 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:50 crc kubenswrapper[4747]: E0202 08:57:50.339552 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.342414 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 01:11:30.298020835 +0000 UTC Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.430606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.430651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.430660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.430674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.430686 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.534047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.534112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.534129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.534153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.534170 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.637187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.637234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.637250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.637272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.637284 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.739467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.739507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.739525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.739542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.739554 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.825223 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/3.log" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.825813 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/2.log" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.828636 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" exitCode=1 Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.828740 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.828790 4747 scope.go:117] "RemoveContainer" containerID="b5d76be96d56cb4ebc1b44020160889b5ec2f267223f908414e3a60dd9146d01" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.829289 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 08:57:50 crc kubenswrapper[4747]: E0202 08:57:50.829453 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.843469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.843510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.843526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.843548 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.843566 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.853921 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-gjwhc" podStartSLOduration=58.8539066 podStartE2EDuration="58.8539066s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:50.853159421 +0000 UTC m=+83.397497854" watchObservedRunningTime="2026-02-02 08:57:50.8539066 +0000 UTC m=+83.398245033" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.899603 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-vjrcp" podStartSLOduration=58.899584349 podStartE2EDuration="58.899584349s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:50.870539146 +0000 UTC m=+83.414877579" watchObservedRunningTime="2026-02-02 08:57:50.899584349 +0000 UTC m=+83.443922792" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.912588 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=60.912561342000004 podStartE2EDuration="1m0.912561342s" podCreationTimestamp="2026-02-02 08:56:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:50.900690238 +0000 UTC m=+83.445028701" watchObservedRunningTime="2026-02-02 08:57:50.912561342 +0000 UTC m=+83.456899775" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.945754 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.945802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.945812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.945829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.945842 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:50Z","lastTransitionTime":"2026-02-02T08:57:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:50 crc kubenswrapper[4747]: I0202 08:57:50.966880 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pvnm7" podStartSLOduration=58.966858102 podStartE2EDuration="58.966858102s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:50.966757179 +0000 UTC m=+83.511095612" watchObservedRunningTime="2026-02-02 08:57:50.966858102 +0000 UTC m=+83.511196535" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.014098 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=18.014081011 podStartE2EDuration="18.014081011s" podCreationTimestamp="2026-02-02 08:57:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.01405767 +0000 UTC m=+83.558396103" watchObservedRunningTime="2026-02-02 08:57:51.014081011 +0000 UTC m=+83.558419444" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.048584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.048624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.048642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.048659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.048670 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.052796 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=29.052726721 podStartE2EDuration="29.052726721s" podCreationTimestamp="2026-02-02 08:57:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.038323292 +0000 UTC m=+83.582661745" watchObservedRunningTime="2026-02-02 08:57:51.052726721 +0000 UTC m=+83.597065154" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.067266 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=60.067248672 podStartE2EDuration="1m0.067248672s" podCreationTimestamp="2026-02-02 08:56:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.06678327 +0000 UTC m=+83.611121713" watchObservedRunningTime="2026-02-02 08:57:51.067248672 +0000 UTC m=+83.611587105" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.094284 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podStartSLOduration=59.094267704 podStartE2EDuration="59.094267704s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.093531145 +0000 UTC m=+83.637869588" watchObservedRunningTime="2026-02-02 08:57:51.094267704 +0000 UTC m=+83.638606137" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.103012 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-v96w7" podStartSLOduration=59.102995268 podStartE2EDuration="59.102995268s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.102139136 +0000 UTC m=+83.646477569" watchObservedRunningTime="2026-02-02 08:57:51.102995268 +0000 UTC m=+83.647333701" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.113567 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hfvzj" podStartSLOduration=58.113548778 podStartE2EDuration="58.113548778s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.113221739 +0000 UTC m=+83.657560192" watchObservedRunningTime="2026-02-02 08:57:51.113548778 +0000 UTC m=+83.657887211" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.129517 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=63.129498066 podStartE2EDuration="1m3.129498066s" podCreationTimestamp="2026-02-02 08:56:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:51.127904605 +0000 UTC m=+83.672243038" watchObservedRunningTime="2026-02-02 08:57:51.129498066 +0000 UTC m=+83.673836499" Feb 02 08:57:51 
crc kubenswrapper[4747]: I0202 08:57:51.150805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.150832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.150840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.150852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.150861 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.253339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.253375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.253386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.253401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.253412 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.338386 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:51 crc kubenswrapper[4747]: E0202 08:57:51.338551 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.343693 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 11:17:44.609811123 +0000 UTC Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.355795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.355846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.355859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.355874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.355885 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.458767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.458801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.458811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.458825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.458835 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.561273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.561337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.561350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.561365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.561378 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.663798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.663851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.663861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.663878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.663888 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.766339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.766408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.766427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.766451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.766468 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.839568 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/3.log" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.843296 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 08:57:51 crc kubenswrapper[4747]: E0202 08:57:51.843460 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.869583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.869643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.869654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.869674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.869685 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.971746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.971805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.971818 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.971836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:51 crc kubenswrapper[4747]: I0202 08:57:51.971847 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:51Z","lastTransitionTime":"2026-02-02T08:57:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.075027 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.075057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.075067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.075083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.075095 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.096578 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.096801 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:58:56.09676963 +0000 UTC m=+148.641108083 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.177412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.177511 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.177527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.177856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.177871 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.198050 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.198319 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.198459 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.198566 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198212 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198793 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:58:56.198774541 +0000 UTC m=+148.743112974 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198466 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198979 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199063 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198524 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.198656 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199216 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199229 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199388 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 08:58:56.199147381 +0000 UTC m=+148.743485814 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199501 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 08:58:56.19948605 +0000 UTC m=+148.743824483 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.199600 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 08:58:56.199587802 +0000 UTC m=+148.743926235 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.280334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.280375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.280388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.280403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.280413 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.339177 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.339248 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m"
Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.339313 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.339396 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.339456 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 08:57:52 crc kubenswrapper[4747]: E0202 08:57:52.339578 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.343816 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:42:06.758201772 +0000 UTC
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.382363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.382435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.382446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.382460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.382471 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.484723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.484785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.484797 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.484817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.484829 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.586700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.586730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.586738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.586751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.586759 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.688689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.688742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.688754 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.688772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.688785 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.791670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.791733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.791758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.791775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.791786 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.893721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.893763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.893774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.893790 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.893803 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.997257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.997566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.997645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.997758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:52 crc kubenswrapper[4747]: I0202 08:57:52.997845 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:52Z","lastTransitionTime":"2026-02-02T08:57:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
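[editor's note] The "durationBeforeRetry 1m4s" in the nestedpendingoperations records above, with the retry scheduled for 08:58:56, is consistent with a per-volume delay that doubles on each failed MountVolume.SetUp. A minimal sketch, assuming a 500ms base and a roughly 2m cap (neither value is stated in this log; only the doubling pattern is inferred):

def duration_before_retry(failures: int, base: float = 0.5, cap: float = 122.0) -> float:
    """Seconds to wait before the next MountVolume.SetUp retry (assumed doubling backoff)."""
    return min(base * 2 ** failures, cap)

if __name__ == "__main__":
    for n in range(1, 9):
        print(f"after failure {n}: retry in {duration_before_retry(n):g}s")
    # After 7 consecutive failures the delay reaches 64s, i.e. the 1m4s recorded above.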
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.100064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.100106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.100128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.100150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.100167 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.203071 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.203132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.203144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.203163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.203175 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.306246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.306296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.306306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.306326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.306337 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.338805 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:57:53 crc kubenswrapper[4747]: E0202 08:57:53.339008 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.344151 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 05:09:41.272572264 +0000 UTC
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.409734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.409772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.409784 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.409799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.409811 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.513469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.513520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.513533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.513554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.513573 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.616861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.616965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.616986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.617010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.617028 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.720300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.720360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.720369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.720386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.720397 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.822556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.822605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.822624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.822644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.822660 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.925574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.925606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.925614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.925627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:53 crc kubenswrapper[4747]: I0202 08:57:53.925637 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:53Z","lastTransitionTime":"2026-02-02T08:57:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.028347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.028398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.028414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.028436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.028453 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.130043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.130076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.130085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.130097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.130105 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
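[editor's note] The condition={...} payload in the setters.go records above is valid JSON, so the Ready condition can be pulled straight out of lines like these. A minimal sketch, assuming the log has been saved to a file (the "kubelet.log" path is hypothetical) and that the condition object is the trailing JSON on each line, as it is here:

import json
import re

COND_RE = re.compile(r'condition=(\{.*\})\s*$')

def ready_conditions(path: str = "kubelet.log"):
    """Yield each Ready condition object embedded in setters.go records."""
    with open(path) as f:
        for line in f:
            m = COND_RE.search(line)
            if m:
                yield json.loads(m.group(1))

for cond in ready_conditions():
    print(cond["lastTransitionTime"], cond["reason"], "-", cond["status"])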
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.232496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.232541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.232552 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.232568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.232580 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.335150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.335193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.335203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.335219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.335233 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.338467 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.338563 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m"
Feb 02 08:57:54 crc kubenswrapper[4747]: E0202 08:57:54.338587 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.338632 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 08:57:54 crc kubenswrapper[4747]: E0202 08:57:54.338698 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2"
Feb 02 08:57:54 crc kubenswrapper[4747]: E0202 08:57:54.338889 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.345459 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 20:11:58.512464274 +0000 UTC
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.437505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.437539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.437548 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.437569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.437581 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.542438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.542491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.542500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.542517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.542539 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.645189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.645234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.645248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.645264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.645278 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.747707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.747751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.747763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.747780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.747789 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.850110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.850167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.850180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.850194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.850206 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.952875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.952919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.953126 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.953144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:54 crc kubenswrapper[4747]: I0202 08:57:54.953155 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:54Z","lastTransitionTime":"2026-02-02T08:57:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.055585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.055617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.055626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.055638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.055647 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.158002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.158047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.158063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.158084 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.158101 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.260649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.260721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.260749 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.260779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.260800 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.338694 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 08:57:55 crc kubenswrapper[4747]: E0202 08:57:55.338865 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
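[editor's note] Every NetworkPluginNotReady record above points at the same precondition: no CNI network config under /etc/kubernetes/cni/net.d/. A minimal sketch of that check, assuming the conventional extensions a CNI-based runtime scans for (the extension list is an assumption, not taken from this log):

import os

CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"  # directory named in the errors above
CNI_EXTS = (".conf", ".conflist", ".json")  # assumed conventional CNI config extensions

def cni_configs(conf_dir: str = CNI_CONF_DIR) -> list[str]:
    """Return CNI config file names, or an empty list if none exist yet."""
    try:
        return sorted(f for f in os.listdir(conf_dir) if f.endswith(CNI_EXTS))
    except FileNotFoundError:
        return []

configs = cni_configs()
if configs:
    print("CNI configs present:", configs)
else:
    print("no CNI configuration file in", CNI_CONF_DIR, "- network plugin not ready")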
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.346227 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 19:31:04.640963399 +0000 UTC Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.363838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.363875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.363886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.363905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.363915 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.466611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.466676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.466693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.466715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.466732 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.569699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.569792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.569824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.569864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.569890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.673350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.673404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.673416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.673434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.673450 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.733351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.733467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.733543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.733576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.733644 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T08:57:55Z","lastTransitionTime":"2026-02-02T08:57:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.793874 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj"] Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.794285 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.796925 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.797309 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.798128 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.798612 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.939031 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.939088 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.939105 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.939122 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:55 crc kubenswrapper[4747]: I0202 08:57:55.939136 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040177 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040218 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040276 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040296 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.040425 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.041178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.046782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.057431 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e58e6d7-a62d-45c7-992e-7088d0c2fbd6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4dblj\" (UID: \"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.111667 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" Feb 02 08:57:56 crc kubenswrapper[4747]: W0202 08:57:56.125549 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e58e6d7_a62d_45c7_992e_7088d0c2fbd6.slice/crio-d0e5d8f7219d18a0d735b6421758b2814f1d5b7a9ddc7955e86ab44ec450fd72 WatchSource:0}: Error finding container d0e5d8f7219d18a0d735b6421758b2814f1d5b7a9ddc7955e86ab44ec450fd72: Status 404 returned error can't find the container with id d0e5d8f7219d18a0d735b6421758b2814f1d5b7a9ddc7955e86ab44ec450fd72 Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.338634 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.338666 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:56 crc kubenswrapper[4747]: E0202 08:57:56.338826 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.338871 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:56 crc kubenswrapper[4747]: E0202 08:57:56.338979 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:56 crc kubenswrapper[4747]: E0202 08:57:56.339059 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.346421 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 03:30:55.94122662 +0000 UTC Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.346477 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.356556 4747 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.859027 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" event={"ID":"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6","Type":"ContainerStarted","Data":"0a7f3ed99b7121b5c0aa6fe432300e9a25c0cc903f8065f5f9ba35e7a16c5b0c"} Feb 02 08:57:56 crc kubenswrapper[4747]: I0202 08:57:56.859076 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" event={"ID":"0e58e6d7-a62d-45c7-992e-7088d0c2fbd6","Type":"ContainerStarted","Data":"d0e5d8f7219d18a0d735b6421758b2814f1d5b7a9ddc7955e86ab44ec450fd72"} Feb 02 08:57:57 crc kubenswrapper[4747]: I0202 08:57:57.338991 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:57 crc kubenswrapper[4747]: E0202 08:57:57.339125 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:57:58 crc kubenswrapper[4747]: I0202 08:57:58.338737 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:57:58 crc kubenswrapper[4747]: I0202 08:57:58.338781 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:57:58 crc kubenswrapper[4747]: I0202 08:57:58.338749 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:57:58 crc kubenswrapper[4747]: E0202 08:57:58.340423 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:57:58 crc kubenswrapper[4747]: E0202 08:57:58.340570 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:57:58 crc kubenswrapper[4747]: E0202 08:57:58.340720 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:57:59 crc kubenswrapper[4747]: I0202 08:57:59.339033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:57:59 crc kubenswrapper[4747]: E0202 08:57:59.339167 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:00 crc kubenswrapper[4747]: I0202 08:58:00.339482 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:00 crc kubenswrapper[4747]: I0202 08:58:00.339666 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:00 crc kubenswrapper[4747]: E0202 08:58:00.339967 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:00 crc kubenswrapper[4747]: I0202 08:58:00.340093 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:00 crc kubenswrapper[4747]: E0202 08:58:00.340239 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:00 crc kubenswrapper[4747]: E0202 08:58:00.340368 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:01 crc kubenswrapper[4747]: I0202 08:58:01.339404 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:01 crc kubenswrapper[4747]: E0202 08:58:01.339590 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:02 crc kubenswrapper[4747]: I0202 08:58:02.338830 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:02 crc kubenswrapper[4747]: I0202 08:58:02.338829 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:02 crc kubenswrapper[4747]: I0202 08:58:02.338867 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:02 crc kubenswrapper[4747]: E0202 08:58:02.339094 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:02 crc kubenswrapper[4747]: E0202 08:58:02.339192 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:02 crc kubenswrapper[4747]: E0202 08:58:02.339384 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:03 crc kubenswrapper[4747]: I0202 08:58:03.339076 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:03 crc kubenswrapper[4747]: E0202 08:58:03.339316 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:03 crc kubenswrapper[4747]: I0202 08:58:03.340455 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 08:58:03 crc kubenswrapper[4747]: E0202 08:58:03.340718 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:58:04 crc kubenswrapper[4747]: I0202 08:58:04.339092 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:04 crc kubenswrapper[4747]: I0202 08:58:04.339092 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:04 crc kubenswrapper[4747]: E0202 08:58:04.339542 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:04 crc kubenswrapper[4747]: I0202 08:58:04.339115 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:04 crc kubenswrapper[4747]: E0202 08:58:04.339655 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:04 crc kubenswrapper[4747]: E0202 08:58:04.339727 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:05 crc kubenswrapper[4747]: I0202 08:58:05.338393 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:05 crc kubenswrapper[4747]: E0202 08:58:05.338535 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:06 crc kubenswrapper[4747]: I0202 08:58:06.339444 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:06 crc kubenswrapper[4747]: I0202 08:58:06.339447 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:06 crc kubenswrapper[4747]: I0202 08:58:06.339737 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:06 crc kubenswrapper[4747]: E0202 08:58:06.339804 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:06 crc kubenswrapper[4747]: E0202 08:58:06.339969 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:06 crc kubenswrapper[4747]: E0202 08:58:06.340022 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:07 crc kubenswrapper[4747]: I0202 08:58:07.338606 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:07 crc kubenswrapper[4747]: E0202 08:58:07.338757 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:08 crc kubenswrapper[4747]: I0202 08:58:08.339180 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:08 crc kubenswrapper[4747]: I0202 08:58:08.339229 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:08 crc kubenswrapper[4747]: I0202 08:58:08.339267 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:08 crc kubenswrapper[4747]: E0202 08:58:08.339375 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:08 crc kubenswrapper[4747]: E0202 08:58:08.341407 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:08 crc kubenswrapper[4747]: E0202 08:58:08.341678 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:09 crc kubenswrapper[4747]: I0202 08:58:09.339219 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:09 crc kubenswrapper[4747]: E0202 08:58:09.339491 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:10 crc kubenswrapper[4747]: I0202 08:58:10.338873 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:10 crc kubenswrapper[4747]: I0202 08:58:10.338915 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:10 crc kubenswrapper[4747]: I0202 08:58:10.338918 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:10 crc kubenswrapper[4747]: E0202 08:58:10.339070 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:10 crc kubenswrapper[4747]: E0202 08:58:10.339186 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:10 crc kubenswrapper[4747]: E0202 08:58:10.339646 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:10 crc kubenswrapper[4747]: I0202 08:58:10.997345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:10 crc kubenswrapper[4747]: E0202 08:58:10.997533 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:58:10 crc kubenswrapper[4747]: E0202 08:58:10.997582 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs podName:83246640-90cc-4bd6-b508-9e2ebdcda8c2 nodeName:}" failed. No retries permitted until 2026-02-02 08:59:14.99756617 +0000 UTC m=+167.541904603 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs") pod "network-metrics-daemon-t5t4m" (UID: "83246640-90cc-4bd6-b508-9e2ebdcda8c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 08:58:11 crc kubenswrapper[4747]: I0202 08:58:11.338748 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:11 crc kubenswrapper[4747]: E0202 08:58:11.338970 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:12 crc kubenswrapper[4747]: I0202 08:58:12.338912 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:12 crc kubenswrapper[4747]: I0202 08:58:12.338993 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:12 crc kubenswrapper[4747]: E0202 08:58:12.339093 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:12 crc kubenswrapper[4747]: I0202 08:58:12.339161 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:12 crc kubenswrapper[4747]: E0202 08:58:12.339225 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:12 crc kubenswrapper[4747]: E0202 08:58:12.339296 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:13 crc kubenswrapper[4747]: I0202 08:58:13.339092 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:13 crc kubenswrapper[4747]: E0202 08:58:13.339286 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:14 crc kubenswrapper[4747]: I0202 08:58:14.338707 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:14 crc kubenswrapper[4747]: E0202 08:58:14.338856 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:14 crc kubenswrapper[4747]: I0202 08:58:14.338727 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:14 crc kubenswrapper[4747]: I0202 08:58:14.338962 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:14 crc kubenswrapper[4747]: E0202 08:58:14.339112 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:14 crc kubenswrapper[4747]: E0202 08:58:14.339164 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:15 crc kubenswrapper[4747]: I0202 08:58:15.338740 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:15 crc kubenswrapper[4747]: E0202 08:58:15.339013 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:16 crc kubenswrapper[4747]: I0202 08:58:16.338471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:16 crc kubenswrapper[4747]: E0202 08:58:16.338704 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:16 crc kubenswrapper[4747]: I0202 08:58:16.338764 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:16 crc kubenswrapper[4747]: E0202 08:58:16.338964 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:16 crc kubenswrapper[4747]: I0202 08:58:16.339143 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:16 crc kubenswrapper[4747]: E0202 08:58:16.339329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:17 crc kubenswrapper[4747]: I0202 08:58:17.339092 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:17 crc kubenswrapper[4747]: E0202 08:58:17.339329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:18 crc kubenswrapper[4747]: I0202 08:58:18.339113 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:18 crc kubenswrapper[4747]: I0202 08:58:18.339113 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:18 crc kubenswrapper[4747]: E0202 08:58:18.340403 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:18 crc kubenswrapper[4747]: I0202 08:58:18.340455 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:18 crc kubenswrapper[4747]: E0202 08:58:18.340592 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:18 crc kubenswrapper[4747]: E0202 08:58:18.341154 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:18 crc kubenswrapper[4747]: I0202 08:58:18.341685 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 08:58:18 crc kubenswrapper[4747]: E0202 08:58:18.345436 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p7782_openshift-ovn-kubernetes(b3dfe801-a30e-4352-bec1-869e46ad5f0a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" Feb 02 08:58:19 crc kubenswrapper[4747]: I0202 08:58:19.339068 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:19 crc kubenswrapper[4747]: E0202 08:58:19.339194 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:20 crc kubenswrapper[4747]: I0202 08:58:20.338915 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:20 crc kubenswrapper[4747]: I0202 08:58:20.339022 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:20 crc kubenswrapper[4747]: I0202 08:58:20.339118 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:20 crc kubenswrapper[4747]: E0202 08:58:20.339111 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:20 crc kubenswrapper[4747]: E0202 08:58:20.339264 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:20 crc kubenswrapper[4747]: E0202 08:58:20.339327 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:21 crc kubenswrapper[4747]: I0202 08:58:21.338644 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:21 crc kubenswrapper[4747]: E0202 08:58:21.338755 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:22 crc kubenswrapper[4747]: I0202 08:58:22.338485 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:22 crc kubenswrapper[4747]: I0202 08:58:22.338618 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:22 crc kubenswrapper[4747]: E0202 08:58:22.338687 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:22 crc kubenswrapper[4747]: E0202 08:58:22.338759 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:22 crc kubenswrapper[4747]: I0202 08:58:22.338550 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:22 crc kubenswrapper[4747]: E0202 08:58:22.338846 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:23 crc kubenswrapper[4747]: I0202 08:58:23.339036 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:23 crc kubenswrapper[4747]: E0202 08:58:23.339171 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:24 crc kubenswrapper[4747]: I0202 08:58:24.338568 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:24 crc kubenswrapper[4747]: I0202 08:58:24.338644 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:24 crc kubenswrapper[4747]: I0202 08:58:24.338607 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:24 crc kubenswrapper[4747]: E0202 08:58:24.338818 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:24 crc kubenswrapper[4747]: E0202 08:58:24.338959 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:24 crc kubenswrapper[4747]: E0202 08:58:24.339049 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:25 crc kubenswrapper[4747]: I0202 08:58:25.339109 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:25 crc kubenswrapper[4747]: E0202 08:58:25.339243 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.338379 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:26 crc kubenswrapper[4747]: E0202 08:58:26.338499 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.338379 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.338402 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:26 crc kubenswrapper[4747]: E0202 08:58:26.338722 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:26 crc kubenswrapper[4747]: E0202 08:58:26.338787 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.950608 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/1.log" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.951294 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/0.log" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.951361 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" containerID="2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60" exitCode=1 Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.951410 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerDied","Data":"2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60"} Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.951467 4747 scope.go:117] "RemoveContainer" containerID="feec52cb470c4de236b3ab3f67e791708162ee3a4868e6fc80171e1e65197c4c" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.952192 4747 scope.go:117] "RemoveContainer" containerID="2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60" Feb 02 08:58:26 crc kubenswrapper[4747]: E0202 08:58:26.952377 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pvnm7_openshift-multus(b1945e48-8aba-4a55-8dce-18e4a87ce4c5)\"" pod="openshift-multus/multus-pvnm7" podUID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" Feb 02 08:58:26 crc kubenswrapper[4747]: I0202 08:58:26.978819 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4dblj" podStartSLOduration=94.978801316 podStartE2EDuration="1m34.978801316s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:57:56.876660147 +0000 UTC m=+89.420998590" watchObservedRunningTime="2026-02-02 08:58:26.978801316 +0000 UTC m=+119.523139759" Feb 02 08:58:27 crc kubenswrapper[4747]: I0202 08:58:27.338929 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:27 crc kubenswrapper[4747]: E0202 08:58:27.339226 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:27 crc kubenswrapper[4747]: I0202 08:58:27.956607 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/1.log" Feb 02 08:58:28 crc kubenswrapper[4747]: E0202 08:58:28.312011 4747 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 02 08:58:28 crc kubenswrapper[4747]: I0202 08:58:28.339477 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:28 crc kubenswrapper[4747]: I0202 08:58:28.339601 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:28 crc kubenswrapper[4747]: I0202 08:58:28.340531 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:28 crc kubenswrapper[4747]: E0202 08:58:28.340524 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:28 crc kubenswrapper[4747]: E0202 08:58:28.340732 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:28 crc kubenswrapper[4747]: E0202 08:58:28.340861 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:28 crc kubenswrapper[4747]: E0202 08:58:28.424172 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 08:58:29 crc kubenswrapper[4747]: I0202 08:58:29.339145 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:29 crc kubenswrapper[4747]: E0202 08:58:29.339295 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:30 crc kubenswrapper[4747]: I0202 08:58:30.338816 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:30 crc kubenswrapper[4747]: I0202 08:58:30.338888 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:30 crc kubenswrapper[4747]: I0202 08:58:30.338825 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:30 crc kubenswrapper[4747]: E0202 08:58:30.339065 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:30 crc kubenswrapper[4747]: E0202 08:58:30.339143 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:30 crc kubenswrapper[4747]: E0202 08:58:30.339233 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:31 crc kubenswrapper[4747]: I0202 08:58:31.338655 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:31 crc kubenswrapper[4747]: E0202 08:58:31.339107 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:32 crc kubenswrapper[4747]: I0202 08:58:32.339196 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:32 crc kubenswrapper[4747]: I0202 08:58:32.339260 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:32 crc kubenswrapper[4747]: I0202 08:58:32.339304 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:32 crc kubenswrapper[4747]: E0202 08:58:32.339423 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:32 crc kubenswrapper[4747]: E0202 08:58:32.339500 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:32 crc kubenswrapper[4747]: E0202 08:58:32.339618 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:33 crc kubenswrapper[4747]: I0202 08:58:33.338910 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:33 crc kubenswrapper[4747]: E0202 08:58:33.339647 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:33 crc kubenswrapper[4747]: I0202 08:58:33.340138 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 08:58:33 crc kubenswrapper[4747]: E0202 08:58:33.425526 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 08:58:33 crc kubenswrapper[4747]: I0202 08:58:33.978103 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/3.log" Feb 02 08:58:33 crc kubenswrapper[4747]: I0202 08:58:33.980367 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerStarted","Data":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 08:58:33 crc kubenswrapper[4747]: I0202 08:58:33.980926 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:58:34 crc kubenswrapper[4747]: I0202 08:58:34.006165 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podStartSLOduration=102.006147714 podStartE2EDuration="1m42.006147714s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:58:34.005170389 +0000 UTC m=+126.549508822" watchObservedRunningTime="2026-02-02 08:58:34.006147714 +0000 UTC m=+126.550486147" Feb 02 08:58:34 crc kubenswrapper[4747]: I0202 08:58:34.219160 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t5t4m"] Feb 02 08:58:34 crc kubenswrapper[4747]: I0202 08:58:34.219280 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:34 crc kubenswrapper[4747]: E0202 08:58:34.219364 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:34 crc kubenswrapper[4747]: I0202 08:58:34.340700 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:34 crc kubenswrapper[4747]: I0202 08:58:34.340768 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:34 crc kubenswrapper[4747]: E0202 08:58:34.340897 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:34 crc kubenswrapper[4747]: E0202 08:58:34.340975 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:35 crc kubenswrapper[4747]: I0202 08:58:35.338824 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:35 crc kubenswrapper[4747]: E0202 08:58:35.339028 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:36 crc kubenswrapper[4747]: I0202 08:58:36.339290 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:36 crc kubenswrapper[4747]: I0202 08:58:36.339341 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:36 crc kubenswrapper[4747]: I0202 08:58:36.339411 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:36 crc kubenswrapper[4747]: E0202 08:58:36.339482 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:36 crc kubenswrapper[4747]: E0202 08:58:36.339613 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:36 crc kubenswrapper[4747]: E0202 08:58:36.339758 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:37 crc kubenswrapper[4747]: I0202 08:58:37.338614 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:37 crc kubenswrapper[4747]: E0202 08:58:37.338738 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:38 crc kubenswrapper[4747]: I0202 08:58:38.338948 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:38 crc kubenswrapper[4747]: I0202 08:58:38.338960 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:38 crc kubenswrapper[4747]: I0202 08:58:38.339027 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:38 crc kubenswrapper[4747]: E0202 08:58:38.341704 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:38 crc kubenswrapper[4747]: E0202 08:58:38.341589 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:38 crc kubenswrapper[4747]: E0202 08:58:38.341815 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:38 crc kubenswrapper[4747]: E0202 08:58:38.427175 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 08:58:39 crc kubenswrapper[4747]: I0202 08:58:39.338408 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:39 crc kubenswrapper[4747]: E0202 08:58:39.338605 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:39 crc kubenswrapper[4747]: I0202 08:58:39.929915 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 08:58:40 crc kubenswrapper[4747]: I0202 08:58:40.338847 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:40 crc kubenswrapper[4747]: I0202 08:58:40.338847 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:40 crc kubenswrapper[4747]: E0202 08:58:40.339049 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:40 crc kubenswrapper[4747]: I0202 08:58:40.339091 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:40 crc kubenswrapper[4747]: E0202 08:58:40.339204 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:40 crc kubenswrapper[4747]: E0202 08:58:40.339290 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:41 crc kubenswrapper[4747]: I0202 08:58:41.339446 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:41 crc kubenswrapper[4747]: E0202 08:58:41.339635 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:42 crc kubenswrapper[4747]: I0202 08:58:42.339362 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:42 crc kubenswrapper[4747]: I0202 08:58:42.339436 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:42 crc kubenswrapper[4747]: I0202 08:58:42.339488 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:42 crc kubenswrapper[4747]: E0202 08:58:42.339588 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:42 crc kubenswrapper[4747]: E0202 08:58:42.339954 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:42 crc kubenswrapper[4747]: I0202 08:58:42.340029 4747 scope.go:117] "RemoveContainer" containerID="2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60" Feb 02 08:58:42 crc kubenswrapper[4747]: E0202 08:58:42.340043 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:43 crc kubenswrapper[4747]: I0202 08:58:43.009986 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/1.log" Feb 02 08:58:43 crc kubenswrapper[4747]: I0202 08:58:43.010045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerStarted","Data":"2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d"} Feb 02 08:58:43 crc kubenswrapper[4747]: I0202 08:58:43.338638 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:43 crc kubenswrapper[4747]: E0202 08:58:43.338826 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:43 crc kubenswrapper[4747]: E0202 08:58:43.428127 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 08:58:44 crc kubenswrapper[4747]: I0202 08:58:44.339311 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:44 crc kubenswrapper[4747]: E0202 08:58:44.339451 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:44 crc kubenswrapper[4747]: I0202 08:58:44.339319 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:44 crc kubenswrapper[4747]: E0202 08:58:44.339552 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:44 crc kubenswrapper[4747]: I0202 08:58:44.339311 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:44 crc kubenswrapper[4747]: E0202 08:58:44.339609 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:45 crc kubenswrapper[4747]: I0202 08:58:45.339033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:45 crc kubenswrapper[4747]: E0202 08:58:45.340264 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:46 crc kubenswrapper[4747]: I0202 08:58:46.339367 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:46 crc kubenswrapper[4747]: I0202 08:58:46.339431 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:46 crc kubenswrapper[4747]: I0202 08:58:46.339583 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:46 crc kubenswrapper[4747]: E0202 08:58:46.340110 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:46 crc kubenswrapper[4747]: E0202 08:58:46.340327 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:46 crc kubenswrapper[4747]: E0202 08:58:46.340506 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:47 crc kubenswrapper[4747]: I0202 08:58:47.338770 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:47 crc kubenswrapper[4747]: E0202 08:58:47.339024 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 08:58:48 crc kubenswrapper[4747]: I0202 08:58:48.338831 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:48 crc kubenswrapper[4747]: I0202 08:58:48.338866 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:48 crc kubenswrapper[4747]: I0202 08:58:48.338866 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:48 crc kubenswrapper[4747]: E0202 08:58:48.340127 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 08:58:48 crc kubenswrapper[4747]: E0202 08:58:48.340329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t5t4m" podUID="83246640-90cc-4bd6-b508-9e2ebdcda8c2" Feb 02 08:58:48 crc kubenswrapper[4747]: E0202 08:58:48.340427 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 08:58:49 crc kubenswrapper[4747]: I0202 08:58:49.338703 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:49 crc kubenswrapper[4747]: I0202 08:58:49.344306 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 08:58:49 crc kubenswrapper[4747]: I0202 08:58:49.345483 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.339055 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.339098 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.339107 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.341485 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.341958 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.342047 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 08:58:50 crc kubenswrapper[4747]: I0202 08:58:50.343429 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.163833 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:58:56 crc kubenswrapper[4747]: E0202 08:58:56.164164 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 09:00:58.164112354 +0000 UTC m=+270.708450807 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.265186 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.265273 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.265320 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.265357 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.266316 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.272413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.272459 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 
08:58:56.272485 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.368345 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.374448 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.553621 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 08:58:56 crc kubenswrapper[4747]: I0202 08:58:56.993206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.068257 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g96zn"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.068921 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dcbr8"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.069150 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.069501 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.069879 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.071131 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.081543 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.081717 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.081882 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082034 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082163 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082186 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082351 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082508 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082695 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.082898 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.083389 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.083549 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.083712 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.087431 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.087769 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.088013 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.088212 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.088356 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.093700 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-m55qb"] Feb 02 08:58:57 crc 
kubenswrapper[4747]: I0202 08:58:57.094115 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.094445 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.094486 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.113763 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.113994 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.115721 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.115824 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.115961 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.118679 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xm7wg"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.135678 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136473 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcnhl"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.137094 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.137745 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.135220 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.135905 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136011 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136098 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136263 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136335 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136401 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136595 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.136713 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.137127 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.137563 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.137594 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.147526 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.150957 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.151174 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.155454 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.157663 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.157827 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.158047 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.158549 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.159169 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.160858 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.161033 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.162229 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.162424 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.162584 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.162721 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.166858 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b9jzq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.168388 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.168698 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.168869 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.168990 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.169078 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.169168 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.169278 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.178481 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.179427 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7dcg9"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.179821 4747 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.180193 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.180430 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181255 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181288 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181309 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-etcd-client\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181327 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181343 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92gdb\" (UniqueName: \"kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181359 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181487 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181505 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-serving-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181523 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxw5l\" (UniqueName: \"kubernetes.io/projected/8da1d50c-554e-4e30-8cc2-cb52865e504c-kube-api-access-jxw5l\") pod \"downloads-7954f5f757-m55qb\" (UID: \"8da1d50c-554e-4e30-8cc2-cb52865e504c\") " pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181539 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181553 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-encryption-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181583 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-audit-policies\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9sdm\" (UniqueName: \"kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: 
\"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181639 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181657 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.182306 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.181675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184549 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg8bh\" (UniqueName: \"kubernetes.io/projected/4408517a-3bf8-4e25-956c-04460df20b30-kube-api-access-vg8bh\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184773 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184794 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit-dir\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184812 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-image-import-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184829 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184846 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-node-pullsecrets\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184862 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184899 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6rq2\" (UniqueName: \"kubernetes.io/projected/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-kube-api-access-q6rq2\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.184924 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189046 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189124 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-client\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189155 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189190 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-serving-cert\") pod 
\"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189217 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189262 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-encryption-config\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-serving-cert\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189334 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189360 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.189385 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4408517a-3bf8-4e25-956c-04460df20b30-audit-dir\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.201651 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.204262 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.204572 4747 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.204909 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.205230 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.205774 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221211 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221354 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221428 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221505 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221610 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221746 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221820 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.221848 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.222065 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.222173 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.222266 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.223661 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7gp42"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.224407 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.232284 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.232522 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.232657 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.232818 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.232976 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233092 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233222 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233721 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233741 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233808 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.233961 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.234098 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.234168 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.234655 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.266989 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.267749 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.269026 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.266974 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.269539 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.271092 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.271331 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.274737 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.276653 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.276814 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.277314 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.298190 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.298540 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.298677 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.298798 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.298955 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.299473 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.301196 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.303900 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.304058 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 
08:58:57.304525 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.304754 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.305061 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.307499 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.307499 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309330 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309559 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309583 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-node-pullsecrets\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309604 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c63a52e2-6fdd-4b79-b054-669bcc611dcb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309624 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309642 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6rq2\" (UniqueName: \"kubernetes.io/projected/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-kube-api-access-q6rq2\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 
08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-config\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309689 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309707 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309673 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309776 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-client\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309799 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309819 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5zj9\" (UniqueName: \"kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309882 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309899 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-serving-cert\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309917 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/959d6a98-e62b-425b-910b-d51ad4a01d1a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309964 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-serving-cert\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309982 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.309998 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thbsw\" (UniqueName: \"kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.310195 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mlxbs"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.310738 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.311268 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.311314 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-node-pullsecrets\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.312099 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.312819 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.317526 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.318292 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.318537 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.318767 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.319033 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.319250 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.319748 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.319795 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.319929 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320139 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-encryption-config\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320296 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3e72ed1-5b4f-483c-8029-a71db619acce-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: 
I0202 08:58:57.320462 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-serving-cert\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320530 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-serving-cert\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320620 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320675 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320702 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4408517a-3bf8-4e25-956c-04460df20b30-audit-dir\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320726 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320746 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " 
pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320767 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-trusted-ca\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320790 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320811 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-serving-cert\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320831 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-config\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320871 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-auth-proxy-config\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320919 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320963 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69dbx\" (UniqueName: \"kubernetes.io/projected/c63a52e2-6fdd-4b79-b054-669bcc611dcb-kube-api-access-69dbx\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.320988 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3e72ed1-5b4f-483c-8029-a71db619acce-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321024 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321048 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-etcd-client\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321071 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/959d6a98-e62b-425b-910b-d51ad4a01d1a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321110 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr585\" (UniqueName: \"kubernetes.io/projected/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-kube-api-access-hr585\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321129 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-machine-approver-tls\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321152 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321174 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92gdb\" (UniqueName: \"kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321195 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnccr\" (UniqueName: \"kubernetes.io/projected/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-kube-api-access-pnccr\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321216 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321236 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321257 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-serving-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxw5l\" (UniqueName: \"kubernetes.io/projected/8da1d50c-554e-4e30-8cc2-cb52865e504c-kube-api-access-jxw5l\") pod \"downloads-7954f5f757-m55qb\" (UID: \"8da1d50c-554e-4e30-8cc2-cb52865e504c\") " pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321302 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321325 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321348 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321375 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-encryption-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321400 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-audit-policies\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321422 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321443 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk8gh\" (UniqueName: \"kubernetes.io/projected/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-kube-api-access-gk8gh\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321466 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grp9r\" (UniqueName: \"kubernetes.io/projected/c3e72ed1-5b4f-483c-8029-a71db619acce-kube-api-access-grp9r\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321485 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321507 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-config\") pod 
\"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321529 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9sdm\" (UniqueName: \"kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321554 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tt5c\" (UniqueName: \"kubernetes.io/projected/959d6a98-e62b-425b-910b-d51ad4a01d1a-kube-api-access-7tt5c\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321574 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lntx4\" (UniqueName: \"kubernetes.io/projected/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-kube-api-access-lntx4\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321594 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-client\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321612 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321638 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-service-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321677 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-config\") pod 
\"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321695 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-metrics-tls\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321718 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321740 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321760 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg8bh\" (UniqueName: \"kubernetes.io/projected/4408517a-3bf8-4e25-956c-04460df20b30-kube-api-access-vg8bh\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-images\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321807 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321826 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit-dir\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.321847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-image-import-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.322498 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-client\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.323713 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.324308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.324882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.325551 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.325561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-etcd-serving-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.326471 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-image-import-ca\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.328555 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4408517a-3bf8-4e25-956c-04460df20b30-audit-dir\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.328859 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: 
I0202 08:58:57.329320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.329858 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.330377 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.331042 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.331295 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.332450 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.332446 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit-dir\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.343861 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.348246 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-audit\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.349159 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.351291 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.351300 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-serving-cert\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.351371 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.351520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.352027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.355487 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.355784 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.358801 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-encryption-config\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.358813 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.359092 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.362348 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-multus/multus-admission-controller-857f4d67dd-q68ll"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.364032 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-prs7q"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.364644 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.364686 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.364721 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.367265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-etcd-client\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.367482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4408517a-3bf8-4e25-956c-04460df20b30-audit-policies\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.367910 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4408517a-3bf8-4e25-956c-04460df20b30-encryption-config\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.374393 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.375275 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.375300 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.376248 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.376601 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.391082 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g96zn"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.394665 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.401959 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.402751 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.409267 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.409847 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.410178 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.410399 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.413801 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.418689 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.421328 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.421946 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422430 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422454 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr585\" (UniqueName: \"kubernetes.io/projected/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-kube-api-access-hr585\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422474 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-machine-approver-tls\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/959d6a98-e62b-425b-910b-d51ad4a01d1a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422519 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnccr\" (UniqueName: \"kubernetes.io/projected/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-kube-api-access-pnccr\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422541 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk8gh\" (UniqueName: \"kubernetes.io/projected/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-kube-api-access-gk8gh\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 
08:58:57.422609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grp9r\" (UniqueName: \"kubernetes.io/projected/c3e72ed1-5b4f-483c-8029-a71db619acce-kube-api-access-grp9r\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422632 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422652 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-config\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422679 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tt5c\" (UniqueName: \"kubernetes.io/projected/959d6a98-e62b-425b-910b-d51ad4a01d1a-kube-api-access-7tt5c\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lntx4\" (UniqueName: \"kubernetes.io/projected/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-kube-api-access-lntx4\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422723 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-service-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-client\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422784 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-config\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422810 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-images\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-metrics-tls\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c63a52e2-6fdd-4b79-b054-669bcc611dcb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422890 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f4b7293-0efa-48e5-8406-f6a196867ad2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422914 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-config\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422954 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.422979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5zj9\" (UniqueName: \"kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/959d6a98-e62b-425b-910b-d51ad4a01d1a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423020 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-serving-cert\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423064 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f4b7293-0efa-48e5-8406-f6a196867ad2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thbsw\" (UniqueName: \"kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423109 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f4b7293-0efa-48e5-8406-f6a196867ad2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3e72ed1-5b4f-483c-8029-a71db619acce-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-trusted-ca\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423191 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: 
I0202 08:58:57.423210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-auth-proxy-config\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.421948 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423274 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-serving-cert\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423294 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-config\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69dbx\" (UniqueName: \"kubernetes.io/projected/c63a52e2-6fdd-4b79-b054-669bcc611dcb-kube-api-access-69dbx\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423375 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/49c2f80b-4c99-4b71-b200-c5148ca07dce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423401 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3e72ed1-5b4f-483c-8029-a71db619acce-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvq74\" (UniqueName: \"kubernetes.io/projected/49c2f80b-4c99-4b71-b200-c5148ca07dce-kube-api-access-fvq74\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.423704 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.425569 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-config\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.425955 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-config\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.426266 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-config\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.426924 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nj4vs"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.427547 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.428180 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.428457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.428474 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.428507 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.429380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-trusted-ca\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.430466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.431161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.431859 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.432422 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.433002 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3e72ed1-5b4f-483c-8029-a71db619acce-config\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.433606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-service-ca\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.434797 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.436455 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c63a52e2-6fdd-4b79-b054-669bcc611dcb-images\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.436683 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c63a52e2-6fdd-4b79-b054-669bcc611dcb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.437491 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/959d6a98-e62b-425b-910b-d51ad4a01d1a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.437678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-serving-cert\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.438451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-metrics-tls\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.439621 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.440442 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.440652 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.441615 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.442647 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.442893 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.444205 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.446975 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dcbr8"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.447696 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-machine-approver-tls\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.448217 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.449562 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-auth-proxy-config\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.449922 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3e72ed1-5b4f-483c-8029-a71db619acce-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.450205 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-config\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.450799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.450872 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.451336 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.451478 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.452474 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/959d6a98-e62b-425b-910b-d51ad4a01d1a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.455198 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z67kq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.457444 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-serving-cert\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.458190 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-etcd-client\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.462654 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.464150 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.464284 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.465095 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.466683 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.467951 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.469104 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b9jzq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.470265 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.471442 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m55qb"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.472023 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.472850 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.474061 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.475649 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.478535 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.481972 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.485856 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.487334 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xm7wg"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.489568 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcnhl"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.491227 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7gp42"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.492105 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.492862 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.494611 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mlxbs"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.496169 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.497598 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.499140 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.500505 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.502030 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.503457 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.504795 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.507917 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7dcg9"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.510385 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.511135 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q68ll"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.512105 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.512374 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-hrg7n"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.513402 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.515865 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-g88c2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.516676 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.520146 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.521623 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.523257 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f4b7293-0efa-48e5-8406-f6a196867ad2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f4b7293-0efa-48e5-8406-f6a196867ad2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/49c2f80b-4c99-4b71-b200-c5148ca07dce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524313 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvq74\" (UniqueName: \"kubernetes.io/projected/49c2f80b-4c99-4b71-b200-c5148ca07dce-kube-api-access-fvq74\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524431 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nj4vs"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.524448 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f4b7293-0efa-48e5-8406-f6a196867ad2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.525043 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f4b7293-0efa-48e5-8406-f6a196867ad2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc 
kubenswrapper[4747]: I0202 08:58:57.527326 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f4b7293-0efa-48e5-8406-f6a196867ad2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.528251 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z67kq"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.530928 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-g88c2"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.531669 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hrg7n"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.532545 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.534747 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-mvhcg"] Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.535611 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.536375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/49c2f80b-4c99-4b71-b200-c5148ca07dce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.552656 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.573109 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.597387 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.611610 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.632069 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.653133 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.672514 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.692085 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.712917 4747 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.732303 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.752102 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.772740 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.798054 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.811453 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.832752 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.852267 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.872270 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.906200 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6rq2\" (UniqueName: \"kubernetes.io/projected/6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df-kube-api-access-q6rq2\") pod \"apiserver-76f77b778f-g96zn\" (UID: \"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df\") " pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.926698 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92gdb\" (UniqueName: \"kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb\") pod \"oauth-openshift-558db77b4-dcbr8\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.947459 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxw5l\" (UniqueName: \"kubernetes.io/projected/8da1d50c-554e-4e30-8cc2-cb52865e504c-kube-api-access-jxw5l\") pod \"downloads-7954f5f757-m55qb\" (UID: \"8da1d50c-554e-4e30-8cc2-cb52865e504c\") " pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.979221 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg8bh\" (UniqueName: \"kubernetes.io/projected/4408517a-3bf8-4e25-956c-04460df20b30-kube-api-access-vg8bh\") pod \"apiserver-7bbb656c7d-qvchd\" (UID: \"4408517a-3bf8-4e25-956c-04460df20b30\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:57 crc kubenswrapper[4747]: I0202 08:58:57.995818 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.013471 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9sdm\" (UniqueName: \"kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm\") pod \"route-controller-manager-6576b87f9c-9brdq\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.032427 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.037600 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.052452 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.060389 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.068255 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a76d79a0b5907af114496510a39459c2b3724f81958a90e6f864866277115a43"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.068334 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4dd4d36698eb4805e9f5df5ca02f98aaa8daef221007f1c177698828c1a3b1d6"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.075857 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.076088 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"04924f97dc110f6bf8047ffc57697bdcdb6c14d719af15bf0526ab20aeb59ce9"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.076147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"cb7a3ee00496554677b209d8d19b27b09674415374122cbc2d34f88de8654497"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.078170 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1e0cd4a382330db5849a8470aea4c605d04691ba5801f4b7f021c90d780bde2b"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.078202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d2fde4d7b26a62338b96f5c79824356dd071b5952ba16744f23f8c021b0f8e67"} Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.078401 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.092526 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.105992 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.115045 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.133421 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.134844 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.152310 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.172180 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.187711 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd"] Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.192021 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 08:58:58 crc kubenswrapper[4747]: W0202 08:58:58.205060 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4408517a_3bf8_4e25_956c_04460df20b30.slice/crio-0ba8338220b135cf54b8ed982968be1400376d010cd28d7b4204dd4d93b81402 WatchSource:0}: Error finding container 0ba8338220b135cf54b8ed982968be1400376d010cd28d7b4204dd4d93b81402: Status 404 returned error can't find the container with id 0ba8338220b135cf54b8ed982968be1400376d010cd28d7b4204dd4d93b81402 Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.213770 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.232505 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.253189 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.266867 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g96zn"] Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.273495 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.287285 4747 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dcbr8"] Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.293138 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 08:58:58 crc kubenswrapper[4747]: W0202 08:58:58.308297 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cfa55e8_7919_4d29_8b2c_3e9dc46dc3df.slice/crio-62d6f526f16bea30b239da8d1c60b5c641a9365c7e5dcd959782343046be92f4 WatchSource:0}: Error finding container 62d6f526f16bea30b239da8d1c60b5c641a9365c7e5dcd959782343046be92f4: Status 404 returned error can't find the container with id 62d6f526f16bea30b239da8d1c60b5c641a9365c7e5dcd959782343046be92f4 Feb 02 08:58:58 crc kubenswrapper[4747]: W0202 08:58:58.310179 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b43f77f_6f91_4311_a016_6fbb58510112.slice/crio-d672d043fb9dcfdb66ac4f1ad1c267ef2e94d307ae34feacecccfa3a207c1d29 WatchSource:0}: Error finding container d672d043fb9dcfdb66ac4f1ad1c267ef2e94d307ae34feacecccfa3a207c1d29: Status 404 returned error can't find the container with id d672d043fb9dcfdb66ac4f1ad1c267ef2e94d307ae34feacecccfa3a207c1d29 Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.313274 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.330519 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.334492 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.353208 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: W0202 08:58:58.361582 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00c5206f_41e3_4fc3_851e_febfc74613a3.slice/crio-c9afce8839087d81921a6d5a1c92181b8a00f3a6ff2ddd105a018b4722e8d1c2 WatchSource:0}: Error finding container c9afce8839087d81921a6d5a1c92181b8a00f3a6ff2ddd105a018b4722e8d1c2: Status 404 returned error can't find the container with id c9afce8839087d81921a6d5a1c92181b8a00f3a6ff2ddd105a018b4722e8d1c2 Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.372396 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.390904 4747 request.go:700] Waited for 1.007549716s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/secrets?fieldSelector=metadata.name%3Dimage-registry-operator-tls&limit=500&resourceVersion=0 Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.392413 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.392686 4747 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m55qb"] Feb 02 08:58:58 crc kubenswrapper[4747]: W0202 08:58:58.402071 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8da1d50c_554e_4e30_8cc2_cb52865e504c.slice/crio-0a83e734ef5bc87551cfcfab38629d2a87a41a7ad6e59a7dec8e72c5c167ce50 WatchSource:0}: Error finding container 0a83e734ef5bc87551cfcfab38629d2a87a41a7ad6e59a7dec8e72c5c167ce50: Status 404 returned error can't find the container with id 0a83e734ef5bc87551cfcfab38629d2a87a41a7ad6e59a7dec8e72c5c167ce50 Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.412320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.432312 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.451999 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.477205 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.492203 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.512649 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.532804 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.553134 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.573241 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.592418 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.613231 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.634319 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.654484 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.673631 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.692545 4747 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.711848 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.732755 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.772472 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.772628 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5zj9\" (UniqueName: \"kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9\") pod \"console-f9d7485db-s6zqr\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.793589 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.813006 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.833144 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.852039 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.872880 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.891873 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.912848 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.932100 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.965963 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr585\" (UniqueName: \"kubernetes.io/projected/1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873-kube-api-access-hr585\") pod \"console-operator-58897d9998-rcnhl\" (UID: \"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873\") " pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.972112 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.983124 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:58:58 crc kubenswrapper[4747]: I0202 08:58:58.990272 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.007657 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thbsw\" (UniqueName: \"kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw\") pod \"controller-manager-879f6c89f-wq8xw\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.027312 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tt5c\" (UniqueName: \"kubernetes.io/projected/959d6a98-e62b-425b-910b-d51ad4a01d1a-kube-api-access-7tt5c\") pod \"openshift-controller-manager-operator-756b6f6bc6-8tmzw\" (UID: \"959d6a98-e62b-425b-910b-d51ad4a01d1a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.051634 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lntx4\" (UniqueName: \"kubernetes.io/projected/92ad06d4-69fe-4a8f-a7b1-57352159c0eb-kube-api-access-lntx4\") pod \"etcd-operator-b45778765-7dcg9\" (UID: \"92ad06d4-69fe-4a8f-a7b1-57352159c0eb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.051712 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.141100 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.144074 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.159802 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" event={"ID":"00c5206f-41e3-4fc3-851e-febfc74613a3","Type":"ContainerStarted","Data":"1954b223abd2279b745f6f5eab08cf069a10991fcde602eeddaefc1167d23b01"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.160378 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" event={"ID":"00c5206f-41e3-4fc3-851e-febfc74613a3","Type":"ContainerStarted","Data":"c9afce8839087d81921a6d5a1c92181b8a00f3a6ff2ddd105a018b4722e8d1c2"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.161093 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.167136 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69dbx\" (UniqueName: \"kubernetes.io/projected/c63a52e2-6fdd-4b79-b054-669bcc611dcb-kube-api-access-69dbx\") pod \"machine-api-operator-5694c8668f-xm7wg\" (UID: \"c63a52e2-6fdd-4b79-b054-669bcc611dcb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.168208 4747 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-9brdq container/route-controller-manager 
namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.168524 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.169007 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" event={"ID":"2b43f77f-6f91-4311-a016-6fbb58510112","Type":"ContainerStarted","Data":"3705a77e3fc1e727cb84efe5a3fd4b9dcd69dbb199caab6661f009357ec06b1f"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.169072 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" event={"ID":"2b43f77f-6f91-4311-a016-6fbb58510112","Type":"ContainerStarted","Data":"d672d043fb9dcfdb66ac4f1ad1c267ef2e94d307ae34feacecccfa3a207c1d29"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.169229 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.171364 4747 generic.go:334] "Generic (PLEG): container finished" podID="6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df" containerID="5cf6377f99ed7b90b2802183e540bbe263f5d48f08c4d40059692c7f3241b3b6" exitCode=0 Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.171382 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnccr\" (UniqueName: \"kubernetes.io/projected/3ffc01dc-2fb5-4474-8f30-acf0ef50441d-kube-api-access-pnccr\") pod \"dns-operator-744455d44c-b9jzq\" (UID: \"3ffc01dc-2fb5-4474-8f30-acf0ef50441d\") " pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.171512 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" event={"ID":"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df","Type":"ContainerDied","Data":"5cf6377f99ed7b90b2802183e540bbe263f5d48f08c4d40059692c7f3241b3b6"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.171569 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" event={"ID":"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df","Type":"ContainerStarted","Data":"62d6f526f16bea30b239da8d1c60b5c641a9365c7e5dcd959782343046be92f4"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.173494 4747 generic.go:334] "Generic (PLEG): container finished" podID="4408517a-3bf8-4e25-956c-04460df20b30" containerID="9b6a6c6f9966347126fcdde73bd5758e9e9c4927249128ba202688f9c8e76614" exitCode=0 Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.173530 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" event={"ID":"4408517a-3bf8-4e25-956c-04460df20b30","Type":"ContainerDied","Data":"9b6a6c6f9966347126fcdde73bd5758e9e9c4927249128ba202688f9c8e76614"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.173561 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" 
event={"ID":"4408517a-3bf8-4e25-956c-04460df20b30","Type":"ContainerStarted","Data":"0ba8338220b135cf54b8ed982968be1400376d010cd28d7b4204dd4d93b81402"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.175229 4747 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dcbr8 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" start-of-body= Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.175256 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.176338 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grp9r\" (UniqueName: \"kubernetes.io/projected/c3e72ed1-5b4f-483c-8029-a71db619acce-kube-api-access-grp9r\") pod \"openshift-apiserver-operator-796bbdcf4f-kz8jx\" (UID: \"c3e72ed1-5b4f-483c-8029-a71db619acce\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.176374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m55qb" event={"ID":"8da1d50c-554e-4e30-8cc2-cb52865e504c","Type":"ContainerStarted","Data":"d29979c079c293651467508ea6a46a18d3a37aca77d953453e31c37df2856702"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.176423 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m55qb" event={"ID":"8da1d50c-554e-4e30-8cc2-cb52865e504c","Type":"ContainerStarted","Data":"0a83e734ef5bc87551cfcfab38629d2a87a41a7ad6e59a7dec8e72c5c167ce50"} Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.176650 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.181336 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.181370 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m55qb" podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.183513 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.186716 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk8gh\" (UniqueName: \"kubernetes.io/projected/e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1-kube-api-access-gk8gh\") pod \"machine-approver-56656f9798-swbbc\" (UID: \"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.193050 4747 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.205542 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.212502 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.232429 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.233783 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.237201 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.253139 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 08:58:59 crc kubenswrapper[4747]: W0202 08:58:59.265333 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51fdceee_2c8b_4830_a3c9_8f02a0e36cc3.slice/crio-f23128c7d79832ccfb536da9288284c5ab9d903b9989c1499c6f5d11795b6084 WatchSource:0}: Error finding container f23128c7d79832ccfb536da9288284c5ab9d903b9989c1499c6f5d11795b6084: Status 404 returned error can't find the container with id f23128c7d79832ccfb536da9288284c5ab9d903b9989c1499c6f5d11795b6084 Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.273297 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.276996 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.291372 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.297130 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rcnhl"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.297527 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.313172 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.332654 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.353166 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.373800 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.392080 4747 request.go:700] Waited for 1.867470142s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-scheduler-operator/serviceaccounts/openshift-kube-scheduler-operator/token Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.411688 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f4b7293-0efa-48e5-8406-f6a196867ad2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gtm8b\" (UID: \"3f4b7293-0efa-48e5-8406-f6a196867ad2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.418998 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7dcg9"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.427505 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.431672 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvq74\" (UniqueName: \"kubernetes.io/projected/49c2f80b-4c99-4b71-b200-c5148ca07dce-kube-api-access-fvq74\") pod \"cluster-samples-operator-665b6dd947-967q2\" (UID: \"49c2f80b-4c99-4b71-b200-c5148ca07dce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.432542 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.452583 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 08:58:59 crc kubenswrapper[4747]: W0202 08:58:59.469752 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92ad06d4_69fe_4a8f_a7b1_57352159c0eb.slice/crio-3fe64257ce8f331ba94ace49677e7d9bb500e3b7a1ac3348fe3e8da191a6f3b6 WatchSource:0}: Error finding container 3fe64257ce8f331ba94ace49677e7d9bb500e3b7a1ac3348fe3e8da191a6f3b6: Status 404 returned error can't find the container with id 3fe64257ce8f331ba94ace49677e7d9bb500e3b7a1ac3348fe3e8da191a6f3b6 Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.472870 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.473344 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-b9jzq"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.477770 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" Feb 02 08:58:59 crc kubenswrapper[4747]: W0202 08:58:59.480559 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ffc01dc_2fb5_4474_8f30_acf0ef50441d.slice/crio-4ab121964b21e1e968fbeb7f7aced54835676a2dbd720881abd1b79fb981b3ea WatchSource:0}: Error finding container 4ab121964b21e1e968fbeb7f7aced54835676a2dbd720881abd1b79fb981b3ea: Status 404 returned error can't find the container with id 4ab121964b21e1e968fbeb7f7aced54835676a2dbd720881abd1b79fb981b3ea Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.544211 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553519 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aefeb9f-c93c-4943-82be-7c02f9366c65-trusted-ca\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553640 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553674 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00093396-6c0c-432f-99e3-f1c059a3f83a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553711 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pff7t\" (UniqueName: \"kubernetes.io/projected/212c87fc-6e91-409c-a18d-932af9399ac8-kube-api-access-pff7t\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553729 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8efa0848-1d58-46b7-896e-83503c62e712-proxy-tls\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 
08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553746 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553765 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8efa0848-1d58-46b7-896e-83503c62e712-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/212c87fc-6e91-409c-a18d-932af9399ac8-serving-cert\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553857 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/00093396-6c0c-432f-99e3-f1c059a3f83a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.553999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225573d2-7c3a-4c04-b1d4-9971e2145c1e-config\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554026 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6aefeb9f-c93c-4943-82be-7c02f9366c65-metrics-tls\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554083 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-config\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554135 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdfkx\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554154 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225573d2-7c3a-4c04-b1d4-9971e2145c1e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554174 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00093396-6c0c-432f-99e3-f1c059a3f83a-config\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554193 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtlwl\" (UniqueName: \"kubernetes.io/projected/f6fb2719-3a65-4654-b670-9999c570d564-kube-api-access-vtlwl\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554223 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554241 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6fb2719-3a65-4654-b670-9999c570d564-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554269 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554360 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/225573d2-7c3a-4c04-b1d4-9971e2145c1e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554381 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-service-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554403 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc2kh\" (UniqueName: \"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-kube-api-access-sc2kh\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554422 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbbkz\" (UniqueName: \"kubernetes.io/projected/8efa0848-1d58-46b7-896e-83503c62e712-kube-api-access-jbbkz\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.554441 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6fb2719-3a65-4654-b670-9999c570d564-serving-cert\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc 
kubenswrapper[4747]: E0202 08:58:59.556880 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.056866398 +0000 UTC m=+152.601204921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:59 crc kubenswrapper[4747]: W0202 08:58:59.593080 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod959d6a98_e62b_425b_910b_d51ad4a01d1a.slice/crio-d5ae700e3d7e973be9f73a3db34cc60018f7ec5abbc289a3b9067dd1168f4219 WatchSource:0}: Error finding container d5ae700e3d7e973be9f73a3db34cc60018f7ec5abbc289a3b9067dd1168f4219: Status 404 returned error can't find the container with id d5ae700e3d7e973be9f73a3db34cc60018f7ec5abbc289a3b9067dd1168f4219 Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.624973 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.634918 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.646408 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.654870 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:58:59 crc kubenswrapper[4747]: E0202 08:58:59.654977 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.15495648 +0000 UTC m=+152.699294913 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655105 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d446bb5-3d8d-4a2f-8e86-68630e466d82-cert\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655127 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwlvb\" (UniqueName: \"kubernetes.io/projected/4d446bb5-3d8d-4a2f-8e86-68630e466d82-kube-api-access-lwlvb\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655151 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-config\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655190 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225573d2-7c3a-4c04-b1d4-9971e2145c1e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655298 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fzrb\" (UniqueName: \"kubernetes.io/projected/cc59d9b6-8e68-4209-8e08-5a10b5a77620-kube-api-access-4fzrb\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655332 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00093396-6c0c-432f-99e3-f1c059a3f83a-config\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655349 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-cabundle\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4f791e66-ece7-47e4-a9bb-71c940fc336d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655390 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-default-certificate\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655415 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6fb2719-3a65-4654-b670-9999c570d564-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655439 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-registration-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655472 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655493 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655547 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-service-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-stats-auth\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d11e316-3b77-4c43-ba00-4bd026183952-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-tmpfs\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655633 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2779189e-5fcd-4f4d-8123-f08906f94d42-config\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655701 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.655731 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2779189e-5fcd-4f4d-8123-f08906f94d42-serving-cert\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.656219 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-config\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.656221 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.656265 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-mountpoint-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.656285 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-plugins-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.656302 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657000 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-service-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657086 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f6fb2719-3a65-4654-b670-9999c570d564-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00093396-6c0c-432f-99e3-f1c059a3f83a-config\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657395 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsmgk\" (UniqueName: \"kubernetes.io/projected/e96fc5c0-9b3a-46ae-a43b-8a5effe1d897-kube-api-access-qsmgk\") pod \"migrator-59844c95c7-gr5sp\" (UID: \"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657479 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates\") pod 
\"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657710 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkc2k\" (UniqueName: \"kubernetes.io/projected/f05f69ab-11cf-44f5-84b7-d9387c51e162-kube-api-access-qkc2k\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657753 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-images\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657779 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f05f69ab-11cf-44f5-84b7-d9387c51e162-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: E0202 08:58:59.657830 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.157809573 +0000 UTC m=+152.702148126 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8efa0848-1d58-46b7-896e-83503c62e712-proxy-tls\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657889 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-socket-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657913 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657956 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7922\" (UniqueName: \"kubernetes.io/projected/17e230d9-1bcc-4715-bb34-d36181eda723-kube-api-access-n7922\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.657979 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-metrics-certs\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658007 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658028 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17e230d9-1bcc-4715-bb34-d36181eda723-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 
02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kzqv\" (UniqueName: \"kubernetes.io/projected/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-kube-api-access-9kzqv\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658081 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-node-bootstrap-token\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.658520 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-srv-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.663506 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.664047 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8efa0848-1d58-46b7-896e-83503c62e712-proxy-tls\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.664675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq79q\" (UniqueName: \"kubernetes.io/projected/5a855422-3b25-4bdd-9df5-10a39f02225e-kube-api-access-vq79q\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.664716 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05f69ab-11cf-44f5-84b7-d9387c51e162-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.664844 
4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f52303ce-0d76-4bda-af76-9d78676487bd-proxy-tls\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.668722 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225573d2-7c3a-4c04-b1d4-9971e2145c1e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.669020 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdfkx\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.669194 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtlwl\" (UniqueName: \"kubernetes.io/projected/f6fb2719-3a65-4654-b670-9999c570d564-kube-api-access-vtlwl\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.669231 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2jgj\" (UniqueName: \"kubernetes.io/projected/4f791e66-ece7-47e4-a9bb-71c940fc336d-kube-api-access-k2jgj\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.669865 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.670451 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.670486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-csi-data-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.671401 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/225573d2-7c3a-4c04-b1d4-9971e2145c1e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.671429 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc2kh\" (UniqueName: \"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-kube-api-access-sc2kh\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.671455 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbbkz\" (UniqueName: \"kubernetes.io/projected/8efa0848-1d58-46b7-896e-83503c62e712-kube-api-access-jbbkz\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.671603 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6fb2719-3a65-4654-b670-9999c570d564-serving-cert\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.671633 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tzpn\" (UniqueName: \"kubernetes.io/projected/2d11e316-3b77-4c43-ba00-4bd026183952-kube-api-access-8tzpn\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.672313 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676145 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-certs\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676280 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-webhook-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676532 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676555 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aefeb9f-c93c-4943-82be-7c02f9366c65-trusted-ca\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.676581 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a28be6b3-8255-4ce0-be32-66f19733171a-config-volume\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.677822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pgb6\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-kube-api-access-9pgb6\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz6ll\" (UniqueName: \"kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679452 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679498 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00093396-6c0c-432f-99e3-f1c059a3f83a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679526 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca\") pod 
\"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679661 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pff7t\" (UniqueName: \"kubernetes.io/projected/212c87fc-6e91-409c-a18d-932af9399ac8-kube-api-access-pff7t\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679740 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a855422-3b25-4bdd-9df5-10a39f02225e-service-ca-bundle\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679767 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8efa0848-1d58-46b7-896e-83503c62e712-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679843 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/00093396-6c0c-432f-99e3-f1c059a3f83a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679866 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/212c87fc-6e91-409c-a18d-932af9399ac8-serving-cert\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-key\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.679993 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb68s\" (UniqueName: \"kubernetes.io/projected/a7707651-619b-45a6-ab81-3b7416f4b6dd-kube-api-access-xb68s\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.680175 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf86c\" (UniqueName: \"kubernetes.io/projected/e9552881-bebb-461b-b37a-3ce4f0a4de8f-kube-api-access-wf86c\") pod 
\"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.680223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gldjq\" (UniqueName: \"kubernetes.io/projected/f52303ce-0d76-4bda-af76-9d78676487bd-kube-api-access-gldjq\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.680249 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwb4z\" (UniqueName: \"kubernetes.io/projected/a28be6b3-8255-4ce0-be32-66f19733171a-kube-api-access-jwb4z\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.681168 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6aefeb9f-c93c-4943-82be-7c02f9366c65-trusted-ca\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.681871 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8efa0848-1d58-46b7-896e-83503c62e712-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682095 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7rxh\" (UniqueName: \"kubernetes.io/projected/2779189e-5fcd-4f4d-8123-f08906f94d42-kube-api-access-x7rxh\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a28be6b3-8255-4ce0-be32-66f19733171a-metrics-tls\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6fb2719-3a65-4654-b670-9999c570d564-serving-cert\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682561 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm67j\" (UniqueName: \"kubernetes.io/projected/f8194841-4b1b-4477-8147-cea5ca9d791a-kube-api-access-dm67j\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 
crc kubenswrapper[4747]: I0202 08:58:59.682673 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-srv-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682803 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9fs4\" (UniqueName: \"kubernetes.io/projected/0f91531c-29da-4a1a-869d-66ad2de9e055-kube-api-access-x9fs4\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.682903 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225573d2-7c3a-4c04-b1d4-9971e2145c1e-config\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683013 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683443 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/212c87fc-6e91-409c-a18d-932af9399ac8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683653 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65p6n\" (UniqueName: \"kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683758 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6aefeb9f-c93c-4943-82be-7c02f9366c65-metrics-tls\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 
08:58:59.683790 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.683972 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225573d2-7c3a-4c04-b1d4-9971e2145c1e-config\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.684064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/212c87fc-6e91-409c-a18d-932af9399ac8-serving-cert\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.684139 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.684243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.687858 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6aefeb9f-c93c-4943-82be-7c02f9366c65-metrics-tls\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.688680 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.695347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00093396-6c0c-432f-99e3-f1c059a3f83a-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.717386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.737646 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdfkx\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.745759 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtlwl\" (UniqueName: \"kubernetes.io/projected/f6fb2719-3a65-4654-b670-9999c570d564-kube-api-access-vtlwl\") pod \"openshift-config-operator-7777fb866f-7gp42\" (UID: \"f6fb2719-3a65-4654-b670-9999c570d564\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.778747 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/225573d2-7c3a-4c04-b1d4-9971e2145c1e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-88hn7\" (UID: \"225573d2-7c3a-4c04-b1d4-9971e2145c1e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.786664 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a28be6b3-8255-4ce0-be32-66f19733171a-config-volume\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pgb6\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-kube-api-access-9pgb6\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788251 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz6ll\" (UniqueName: \"kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788275 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a855422-3b25-4bdd-9df5-10a39f02225e-service-ca-bundle\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " 
pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788301 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-key\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf86c\" (UniqueName: \"kubernetes.io/projected/e9552881-bebb-461b-b37a-3ce4f0a4de8f-kube-api-access-wf86c\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788340 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gldjq\" (UniqueName: \"kubernetes.io/projected/f52303ce-0d76-4bda-af76-9d78676487bd-kube-api-access-gldjq\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb68s\" (UniqueName: \"kubernetes.io/projected/a7707651-619b-45a6-ab81-3b7416f4b6dd-kube-api-access-xb68s\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwb4z\" (UniqueName: \"kubernetes.io/projected/a28be6b3-8255-4ce0-be32-66f19733171a-kube-api-access-jwb4z\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7rxh\" (UniqueName: \"kubernetes.io/projected/2779189e-5fcd-4f4d-8123-f08906f94d42-kube-api-access-x7rxh\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a28be6b3-8255-4ce0-be32-66f19733171a-metrics-tls\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788442 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-srv-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788459 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm67j\" (UniqueName: 
\"kubernetes.io/projected/f8194841-4b1b-4477-8147-cea5ca9d791a-kube-api-access-dm67j\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788482 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9fs4\" (UniqueName: \"kubernetes.io/projected/0f91531c-29da-4a1a-869d-66ad2de9e055-kube-api-access-x9fs4\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788542 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788564 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65p6n\" (UniqueName: \"kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788592 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d446bb5-3d8d-4a2f-8e86-68630e466d82-cert\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788627 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwlvb\" (UniqueName: \"kubernetes.io/projected/4d446bb5-3d8d-4a2f-8e86-68630e466d82-kube-api-access-lwlvb\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788648 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fzrb\" (UniqueName: \"kubernetes.io/projected/cc59d9b6-8e68-4209-8e08-5a10b5a77620-kube-api-access-4fzrb\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788720 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-cabundle\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788751 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4f791e66-ece7-47e4-a9bb-71c940fc336d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788777 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-default-certificate\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788798 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-registration-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788828 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d11e316-3b77-4c43-ba00-4bd026183952-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788876 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-tmpfs\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.788897 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-stats-auth\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: E0202 08:58:59.791421 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.291395273 +0000 UTC m=+152.835733876 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791402 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2779189e-5fcd-4f4d-8123-f08906f94d42-config\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2779189e-5fcd-4f4d-8123-f08906f94d42-serving-cert\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791599 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-mountpoint-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791746 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-plugins-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791772 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791798 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsmgk\" (UniqueName: \"kubernetes.io/projected/e96fc5c0-9b3a-46ae-a43b-8a5effe1d897-kube-api-access-qsmgk\") pod \"migrator-59844c95c7-gr5sp\" (UID: \"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791819 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791849 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkc2k\" (UniqueName: \"kubernetes.io/projected/f05f69ab-11cf-44f5-84b7-d9387c51e162-kube-api-access-qkc2k\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791874 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-images\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791895 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f05f69ab-11cf-44f5-84b7-d9387c51e162-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.791980 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-socket-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792009 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-metrics-certs\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7922\" (UniqueName: \"kubernetes.io/projected/17e230d9-1bcc-4715-bb34-d36181eda723-kube-api-access-n7922\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792126 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17e230d9-1bcc-4715-bb34-d36181eda723-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792146 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kzqv\" (UniqueName: \"kubernetes.io/projected/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-kube-api-access-9kzqv\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792202 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-node-bootstrap-token\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792227 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-srv-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq79q\" (UniqueName: \"kubernetes.io/projected/5a855422-3b25-4bdd-9df5-10a39f02225e-kube-api-access-vq79q\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792283 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05f69ab-11cf-44f5-84b7-d9387c51e162-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792314 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f52303ce-0d76-4bda-af76-9d78676487bd-proxy-tls\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 
08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792350 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2jgj\" (UniqueName: \"kubernetes.io/projected/4f791e66-ece7-47e4-a9bb-71c940fc336d-kube-api-access-k2jgj\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792410 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2779189e-5fcd-4f4d-8123-f08906f94d42-config\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792425 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-csi-data-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792454 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xm7wg"] Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-csi-data-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.792571 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tzpn\" (UniqueName: \"kubernetes.io/projected/2d11e316-3b77-4c43-ba00-4bd026183952-kube-api-access-8tzpn\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.793066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-registration-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.793374 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-mountpoint-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.793665 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a855422-3b25-4bdd-9df5-10a39f02225e-service-ca-bundle\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.796000 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d446bb5-3d8d-4a2f-8e86-68630e466d82-cert\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.796538 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.797370 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-tmpfs\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.797508 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.799622 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-cabundle\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.801126 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05f69ab-11cf-44f5-84b7-d9387c51e162-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.802685 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-plugins-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.804051 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8194841-4b1b-4477-8147-cea5ca9d791a-socket-dir\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.804627 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.804688 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a28be6b3-8255-4ce0-be32-66f19733171a-config-volume\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.804914 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-images\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.805614 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f52303ce-0d76-4bda-af76-9d78676487bd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.805704 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.805801 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-certs\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.805948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-webhook-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.807870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.808996 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/e9552881-bebb-461b-b37a-3ce4f0a4de8f-signing-key\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: 
\"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.809667 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.810375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-node-bootstrap-token\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.812352 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc2kh\" (UniqueName: \"kubernetes.io/projected/6aefeb9f-c93c-4943-82be-7c02f9366c65-kube-api-access-sc2kh\") pod \"ingress-operator-5b745b69d9-7vx8p\" (UID: \"6aefeb9f-c93c-4943-82be-7c02f9366c65\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.814477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f52303ce-0d76-4bda-af76-9d78676487bd-proxy-tls\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.814597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0f91531c-29da-4a1a-869d-66ad2de9e055-certs\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.815074 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-metrics-certs\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.815138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f05f69ab-11cf-44f5-84b7-d9387c51e162-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.815775 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-webhook-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.816006 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.822712 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2779189e-5fcd-4f4d-8123-f08906f94d42-serving-cert\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.822824 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17e230d9-1bcc-4715-bb34-d36181eda723-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.829676 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2d11e316-3b77-4c43-ba00-4bd026183952-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.829713 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-stats-auth\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.830549 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.831151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbbkz\" (UniqueName: \"kubernetes.io/projected/8efa0848-1d58-46b7-896e-83503c62e712-kube-api-access-jbbkz\") pod \"machine-config-controller-84d6567774-8v24r\" (UID: \"8efa0848-1d58-46b7-896e-83503c62e712\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.831637 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.832457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc59d9b6-8e68-4209-8e08-5a10b5a77620-srv-cert\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: 
\"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.833175 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/4f791e66-ece7-47e4-a9bb-71c940fc336d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.833262 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7707651-619b-45a6-ab81-3b7416f4b6dd-srv-cert\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.844070 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-apiservice-cert\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.867708 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5a855422-3b25-4bdd-9df5-10a39f02225e-default-certificate\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.867789 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a28be6b3-8255-4ce0-be32-66f19733171a-metrics-tls\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.868106 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.869283 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/00093396-6c0c-432f-99e3-f1c059a3f83a-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-c2pfz\" (UID: \"00093396-6c0c-432f-99e3-f1c059a3f83a\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.891183 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pff7t\" (UniqueName: \"kubernetes.io/projected/212c87fc-6e91-409c-a18d-932af9399ac8-kube-api-access-pff7t\") pod \"authentication-operator-69f744f599-mlxbs\" (UID: \"212c87fc-6e91-409c-a18d-932af9399ac8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.908401 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:58:59 crc kubenswrapper[4747]: E0202 08:58:59.908907 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.408891671 +0000 UTC m=+152.953230104 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.910856 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb68s\" (UniqueName: \"kubernetes.io/projected/a7707651-619b-45a6-ab81-3b7416f4b6dd-kube-api-access-xb68s\") pod \"olm-operator-6b444d44fb-l8xs2\" (UID: \"a7707651-619b-45a6-ab81-3b7416f4b6dd\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.913706 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.941684 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf86c\" (UniqueName: \"kubernetes.io/projected/e9552881-bebb-461b-b37a-3ce4f0a4de8f-kube-api-access-wf86c\") pod \"service-ca-9c57cc56f-nj4vs\" (UID: \"e9552881-bebb-461b-b37a-3ce4f0a4de8f\") " pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.955272 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.957386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gldjq\" (UniqueName: \"kubernetes.io/projected/f52303ce-0d76-4bda-af76-9d78676487bd-kube-api-access-gldjq\") pod \"machine-config-operator-74547568cd-tppnd\" (UID: \"f52303ce-0d76-4bda-af76-9d78676487bd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.962874 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.968618 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.976304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwlvb\" (UniqueName: \"kubernetes.io/projected/4d446bb5-3d8d-4a2f-8e86-68630e466d82-kube-api-access-lwlvb\") pod \"ingress-canary-g88c2\" (UID: \"4d446bb5-3d8d-4a2f-8e86-68630e466d82\") " pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.978698 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.986857 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.996001 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz6ll\" (UniqueName: \"kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll\") pod \"collect-profiles-29500365-j9tdh\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:58:59 crc kubenswrapper[4747]: I0202 08:58:59.998128 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.009997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.012443 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.512419712 +0000 UTC m=+153.056758145 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.018653 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm67j\" (UniqueName: \"kubernetes.io/projected/f8194841-4b1b-4477-8147-cea5ca9d791a-kube-api-access-dm67j\") pod \"csi-hostpathplugin-z67kq\" (UID: \"f8194841-4b1b-4477-8147-cea5ca9d791a\") " pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.030439 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2"] Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.033535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pgb6\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-kube-api-access-9pgb6\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.056751 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65p6n\" (UniqueName: \"kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n\") pod \"marketplace-operator-79b997595-pw275\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.069911 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fzrb\" (UniqueName: \"kubernetes.io/projected/cc59d9b6-8e68-4209-8e08-5a10b5a77620-kube-api-access-4fzrb\") pod \"catalog-operator-68c6474976-4zlqc\" (UID: \"cc59d9b6-8e68-4209-8e08-5a10b5a77620\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.073203 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b"] Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.102194 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.103628 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tzpn\" (UniqueName: \"kubernetes.io/projected/2d11e316-3b77-4c43-ba00-4bd026183952-kube-api-access-8tzpn\") pod \"multus-admission-controller-857f4d67dd-q68ll\" (UID: \"2d11e316-3b77-4c43-ba00-4bd026183952\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.110662 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.112120 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.113161 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.613141361 +0000 UTC m=+153.157479794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.114661 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwb4z\" (UniqueName: \"kubernetes.io/projected/a28be6b3-8255-4ce0-be32-66f19733171a-kube-api-access-jwb4z\") pod \"dns-default-hrg7n\" (UID: \"a28be6b3-8255-4ce0-be32-66f19733171a\") " pod="openshift-dns/dns-default-hrg7n" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.118423 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.132848 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq79q\" (UniqueName: \"kubernetes.io/projected/5a855422-3b25-4bdd-9df5-10a39f02225e-kube-api-access-vq79q\") pod \"router-default-5444994796-prs7q\" (UID: \"5a855422-3b25-4bdd-9df5-10a39f02225e\") " pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.153733 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.162105 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hrg7n" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.169214 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-g88c2" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.192521 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkc2k\" (UniqueName: \"kubernetes.io/projected/f05f69ab-11cf-44f5-84b7-d9387c51e162-kube-api-access-qkc2k\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9lzj\" (UID: \"f05f69ab-11cf-44f5-84b7-d9387c51e162\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.197328 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsmgk\" (UniqueName: \"kubernetes.io/projected/e96fc5c0-9b3a-46ae-a43b-8a5effe1d897-kube-api-access-qsmgk\") pod \"migrator-59844c95c7-gr5sp\" (UID: \"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.200246 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2jgj\" (UniqueName: \"kubernetes.io/projected/4f791e66-ece7-47e4-a9bb-71c940fc336d-kube-api-access-k2jgj\") pod \"control-plane-machine-set-operator-78cbb6b69f-tmxxh\" (UID: \"4f791e66-ece7-47e4-a9bb-71c940fc336d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.205621 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" event={"ID":"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df","Type":"ContainerStarted","Data":"c49ecb292137db88188df54e152ac9c7a7914cc278f34199603229c7f28e04f1"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.209239 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9fs4\" (UniqueName: \"kubernetes.io/projected/0f91531c-29da-4a1a-869d-66ad2de9e055-kube-api-access-x9fs4\") pod \"machine-config-server-mvhcg\" (UID: \"0f91531c-29da-4a1a-869d-66ad2de9e055\") " pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.216056 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.216546 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.716527628 +0000 UTC m=+153.260866061 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.220420 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" event={"ID":"4408517a-3bf8-4e25-956c-04460df20b30","Type":"ContainerStarted","Data":"60fbf3b259d484d59dbcf1a5655d7994e1e198029f03b218e0c3b1c2a7a00a6c"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.228230 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" event={"ID":"959d6a98-e62b-425b-910b-d51ad4a01d1a","Type":"ContainerStarted","Data":"2ff56f9a6594d779742954a8bd80fb75cf7df8b3fae596a56b220186b58309d7"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.228277 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" event={"ID":"959d6a98-e62b-425b-910b-d51ad4a01d1a","Type":"ContainerStarted","Data":"d5ae700e3d7e973be9f73a3db34cc60018f7ec5abbc289a3b9067dd1168f4219"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.228390 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7rxh\" (UniqueName: \"kubernetes.io/projected/2779189e-5fcd-4f4d-8123-f08906f94d42-kube-api-access-x7rxh\") pod \"service-ca-operator-777779d784-mb7jj\" (UID: \"2779189e-5fcd-4f4d-8123-f08906f94d42\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.242419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" event={"ID":"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1","Type":"ContainerStarted","Data":"dfa6c23569fbb4e0ae79156f3dfc9d7a93fcb8a56a9cdfeaeb608456504ef198"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.242459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" event={"ID":"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1","Type":"ContainerStarted","Data":"0a9ec4b4376eb5beb90372e78c6cf8212bc0275291bc902848c7b08e816caca5"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.253395 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7922\" (UniqueName: \"kubernetes.io/projected/17e230d9-1bcc-4715-bb34-d36181eda723-kube-api-access-n7922\") pod \"package-server-manager-789f6589d5-z2hbw\" (UID: \"17e230d9-1bcc-4715-bb34-d36181eda723\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.256655 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" event={"ID":"4e7a8080-f368-4138-911b-b9d6e4c84ea1","Type":"ContainerStarted","Data":"522786370ee79bc1617c61c4bba3d4cc2d4ed1e8651bab96c7d012b0ac747e78"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.256717 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" event={"ID":"4e7a8080-f368-4138-911b-b9d6e4c84ea1","Type":"ContainerStarted","Data":"bd47d7f952af4d21f853785c72e729e66d8d9e9dc43c27dbb92e0706f12f41d3"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.257285 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.259912 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" event={"ID":"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873","Type":"ContainerStarted","Data":"59e10c7989a4d8f9ea13fb0100f6bfe89c70ed444472d7e88da93dd516f08116"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.259986 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" event={"ID":"1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873","Type":"ContainerStarted","Data":"70e3cc65558f7ba1dcb7c7aa41e8ee14829a2c50dc64ce1024221e7dfa7c4a56"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.261033 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.274186 4747 patch_prober.go:28] interesting pod/console-operator-58897d9998-rcnhl container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.274254 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" podUID="1214e93d-7fdc-4d8a-9f2f-c6aa87bb2873" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.274602 4747 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wq8xw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.274623 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.285741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" event={"ID":"3ffc01dc-2fb5-4474-8f30-acf0ef50441d","Type":"ContainerStarted","Data":"eac1affd1dd5c9e0ced8cca7fc6fbcb0d60fd73fb538fd59ca06cfc0b03affa3"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.286019 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" event={"ID":"3ffc01dc-2fb5-4474-8f30-acf0ef50441d","Type":"ContainerStarted","Data":"4ab121964b21e1e968fbeb7f7aced54835676a2dbd720881abd1b79fb981b3ea"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.303550 4747 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" event={"ID":"3f4b7293-0efa-48e5-8406-f6a196867ad2","Type":"ContainerStarted","Data":"925966442d9cc1688cd3f27caece84aacbf168942a034881cb5b359063061c84"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.305041 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.314528 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.317355 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-s6zqr" event={"ID":"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3","Type":"ContainerStarted","Data":"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.317393 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-s6zqr" event={"ID":"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3","Type":"ContainerStarted","Data":"f23128c7d79832ccfb536da9288284c5ab9d903b9989c1499c6f5d11795b6084"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.318549 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.319756 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.81974404 +0000 UTC m=+153.364082473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.323381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" event={"ID":"49c2f80b-4c99-4b71-b200-c5148ca07dce","Type":"ContainerStarted","Data":"023b4e42a1c0b25692af56dedd02d370eb72a3772115efe1ea13759d23d495f0"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.323554 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.325534 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" event={"ID":"92ad06d4-69fe-4a8f-a7b1-57352159c0eb","Type":"ContainerStarted","Data":"6ae3796a0a3951e92fef918b05078ab22b09d290be08fb408c1a12f73fc7e6a8"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.325558 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" event={"ID":"92ad06d4-69fe-4a8f-a7b1-57352159c0eb","Type":"ContainerStarted","Data":"3fe64257ce8f331ba94ace49677e7d9bb500e3b7a1ac3348fe3e8da191a6f3b6"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.334086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" event={"ID":"c63a52e2-6fdd-4b79-b054-669bcc611dcb","Type":"ContainerStarted","Data":"632eb26562702245e1d6af7e9730767d0f10381afe5f51e7a92bed4ca52238a3"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.334123 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" event={"ID":"c63a52e2-6fdd-4b79-b054-669bcc611dcb","Type":"ContainerStarted","Data":"8bfc2baab3690781c731f12dbd695e82ebbd5fe8011aa6d780488503076825bf"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.341004 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.341069 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m55qb" podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.342068 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.346037 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kzqv\" (UniqueName: \"kubernetes.io/projected/21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5-kube-api-access-9kzqv\") pod \"packageserver-d55dfcdfc-t9rvz\" (UID: \"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.351219 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.351367 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e4cf43d-a3b1-444a-b68a-9d59f3f45a78-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9zx7z\" (UID: \"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.361905 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.373283 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.390574 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.398917 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.420628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.421306 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" event={"ID":"c3e72ed1-5b4f-483c-8029-a71db619acce","Type":"ContainerStarted","Data":"71dbddd1f84032fc473638c4d51d2c26c8bebf2f914038f944847aeaa658d205"} Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.421803 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.421879 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" event={"ID":"c3e72ed1-5b4f-483c-8029-a71db619acce","Type":"ContainerStarted","Data":"308e8a9fb9a344d8d3456ce633dc74c1168adf003db1780f9e550493623cff54"} Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.421987 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.921727151 +0000 UTC m=+153.466065584 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.423811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.432980 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.454198 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:00.954173302 +0000 UTC m=+153.498511745 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.477637 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mvhcg" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.495428 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.526777 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.528349 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.02830974 +0000 UTC m=+153.572648173 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.555482 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7gp42"] Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.567204 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r"] Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.570216 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mlxbs"] Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.630828 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.631294 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.131277196 +0000 UTC m=+153.675615629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.637060 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" Feb 02 08:59:00 crc kubenswrapper[4747]: W0202 08:59:00.685495 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6fb2719_3a65_4654_b670_9999c570d564.slice/crio-d17741b7e83bf83fec1d617c0c4b6ab1f4d48de9b0a59b1e34990a06d0774b5c WatchSource:0}: Error finding container d17741b7e83bf83fec1d617c0c4b6ab1f4d48de9b0a59b1e34990a06d0774b5c: Status 404 returned error can't find the container with id d17741b7e83bf83fec1d617c0c4b6ab1f4d48de9b0a59b1e34990a06d0774b5c Feb 02 08:59:00 crc kubenswrapper[4747]: W0202 08:59:00.689426 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8efa0848_1d58_46b7_896e_83503c62e712.slice/crio-461a9e82d93fb2ef041345a463ded451a232316502fd02ec48df94fa0bfe96ce WatchSource:0}: Error finding container 461a9e82d93fb2ef041345a463ded451a232316502fd02ec48df94fa0bfe96ce: Status 404 returned error can't find the container with id 461a9e82d93fb2ef041345a463ded451a232316502fd02ec48df94fa0bfe96ce Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.732702 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.733165 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.233150435 +0000 UTC m=+153.777488868 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.833952 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.834763 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.334744656 +0000 UTC m=+153.879083089 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:00 crc kubenswrapper[4747]: I0202 08:59:00.943512 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:00 crc kubenswrapper[4747]: E0202 08:59:00.943871 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.443851059 +0000 UTC m=+153.988189492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.037671 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" podStartSLOduration=128.037651711 podStartE2EDuration="2m8.037651711s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.03724107 +0000 UTC m=+153.581579513" watchObservedRunningTime="2026-02-02 08:59:01.037651711 +0000 UTC m=+153.581990144" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.048587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.048891 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.548878218 +0000 UTC m=+154.093216651 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.050687 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.149437 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-m55qb" podStartSLOduration=129.149419282 podStartE2EDuration="2m9.149419282s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.147412231 +0000 UTC m=+153.691750664" watchObservedRunningTime="2026-02-02 08:59:01.149419282 +0000 UTC m=+153.693757715" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.150797 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.151193 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.651177918 +0000 UTC m=+154.195516351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.267274 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.267574 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.767563787 +0000 UTC m=+154.311902220 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.284446 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-s6zqr" podStartSLOduration=129.284428209 podStartE2EDuration="2m9.284428209s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.196858877 +0000 UTC m=+153.741197300" watchObservedRunningTime="2026-02-02 08:59:01.284428209 +0000 UTC m=+153.828766652" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.363767 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" podStartSLOduration=128.36374712 podStartE2EDuration="2m8.36374712s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.363163975 +0000 UTC m=+153.907502428" watchObservedRunningTime="2026-02-02 08:59:01.36374712 +0000 UTC m=+153.908085553" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.369039 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.371481 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:01.871459537 +0000 UTC m=+154.415797970 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.394284 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" event={"ID":"49c2f80b-4c99-4b71-b200-c5148ca07dce","Type":"ContainerStarted","Data":"4724de3804ac7836c0d55df7e4d8d6a535dcef4124d74b23a67adbba3a965a34"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.417687 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-prs7q" event={"ID":"5a855422-3b25-4bdd-9df5-10a39f02225e","Type":"ContainerStarted","Data":"65857dc50ef58549b0fca0fe0bde23987a583c04d08d588893d7079fd24ed8a8"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.417739 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-prs7q" event={"ID":"5a855422-3b25-4bdd-9df5-10a39f02225e","Type":"ContainerStarted","Data":"ee24e7f0ae740d9af00d6fe5c472d9676c9da3aaa2014c271d6a9c69612bb013"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.425809 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.428284 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" podStartSLOduration=129.428261482 podStartE2EDuration="2m9.428261482s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.422958526 +0000 UTC m=+153.967296979" watchObservedRunningTime="2026-02-02 08:59:01.428261482 +0000 UTC m=+153.972599915" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.451598 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.463982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" event={"ID":"e7cf0e06-e52e-482e-93c3-e8a49d9dc0a1","Type":"ContainerStarted","Data":"c441b4c45bf587f246f636c6bfa191aa0659e5ca552fd6d3d1031cd900b94946"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.473465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.474507 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 08:59:01.974494775 +0000 UTC m=+154.518833208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.481097 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" event={"ID":"3ffc01dc-2fb5-4474-8f30-acf0ef50441d","Type":"ContainerStarted","Data":"aaeac0f514ce7eab01e977b3c5208f8c8a507b7044b206ab8e02a7b61e013883"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.508669 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.511481 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.525800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.537340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" event={"ID":"225573d2-7c3a-4c04-b1d4-9971e2145c1e","Type":"ContainerStarted","Data":"22d00a44c97bb90afaf68a03240f1553bc20c7b81cd62d354d8eed5105723e1c"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.545463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" event={"ID":"3f4b7293-0efa-48e5-8406-f6a196867ad2","Type":"ContainerStarted","Data":"677be93445dd94056409c5693d4991d625eb7e4af901b3baa4c3ea81243ac4f5"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.561169 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mvhcg" event={"ID":"0f91531c-29da-4a1a-869d-66ad2de9e055","Type":"ContainerStarted","Data":"eb8a917aab20e79b7b0ce7f2346d386ef1dcafe7df29fd7c309c3d3fdc56efe2"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.575350 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.577238 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.077210285 +0000 UTC m=+154.621548718 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.587157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" event={"ID":"8efa0848-1d58-46b7-896e-83503c62e712","Type":"ContainerStarted","Data":"461a9e82d93fb2ef041345a463ded451a232316502fd02ec48df94fa0bfe96ce"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.595603 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" event={"ID":"212c87fc-6e91-409c-a18d-932af9399ac8","Type":"ContainerStarted","Data":"163aeeec073b4c0700833ccb8f0779ddda10fb0683a52bf954da9deae71d3fed"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.602057 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" event={"ID":"f6fb2719-3a65-4654-b670-9999c570d564","Type":"ContainerStarted","Data":"d17741b7e83bf83fec1d617c0c4b6ab1f4d48de9b0a59b1e34990a06d0774b5c"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.620068 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" event={"ID":"c63a52e2-6fdd-4b79-b054-669bcc611dcb","Type":"ContainerStarted","Data":"b7545a4ac9334b5228cc1afd8ea73857b6842e41f05395ca2c95d385bf8594bd"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.665782 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-g88c2"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.678653 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.678987 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.178974351 +0000 UTC m=+154.723312774 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.680399 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hrg7n"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.688221 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" event={"ID":"6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df","Type":"ContainerStarted","Data":"f5280c5bfb07c405a1586410ca955a24f3f9a96025c57d31791b99f706fe99e5"} Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.696113 4747 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wq8xw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.696158 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.746270 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8tmzw" podStartSLOduration=129.746250313 podStartE2EDuration="2m9.746250313s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.713550536 +0000 UTC m=+154.257888979" watchObservedRunningTime="2026-02-02 08:59:01.746250313 +0000 UTC m=+154.290588746" Feb 02 08:59:01 crc kubenswrapper[4747]: W0202 08:59:01.788208 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda28be6b3_8255_4ce0_be32_66f19733171a.slice/crio-101bd8c6faf46b3c8be39bfe845d2fadf4d55236fcbdecf6a239595c7686e525 WatchSource:0}: Error finding container 101bd8c6faf46b3c8be39bfe845d2fadf4d55236fcbdecf6a239595c7686e525: Status 404 returned error can't find the container with id 101bd8c6faf46b3c8be39bfe845d2fadf4d55236fcbdecf6a239595c7686e525 Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.788539 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.789753 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 08:59:02.289734957 +0000 UTC m=+154.834073390 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.789965 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.790888 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-kz8jx" podStartSLOduration=129.790877666 podStartE2EDuration="2m9.790877666s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.748620604 +0000 UTC m=+154.292959037" watchObservedRunningTime="2026-02-02 08:59:01.790877666 +0000 UTC m=+154.335216099" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.791200 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.291191774 +0000 UTC m=+154.835530207 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.814866 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" podStartSLOduration=129.814836939 podStartE2EDuration="2m9.814836939s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.791406829 +0000 UTC m=+154.335745252" watchObservedRunningTime="2026-02-02 08:59:01.814836939 +0000 UTC m=+154.359175372" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.881333 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" podStartSLOduration=129.881315811 podStartE2EDuration="2m9.881315811s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.876254822 +0000 UTC m=+154.420593265" watchObservedRunningTime="2026-02-02 08:59:01.881315811 +0000 UTC m=+154.425654244" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.894758 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.896067 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.396049429 +0000 UTC m=+154.940387862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.927524 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-7dcg9" podStartSLOduration=129.927511464 podStartE2EDuration="2m9.927511464s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:01.926770155 +0000 UTC m=+154.471108588" watchObservedRunningTime="2026-02-02 08:59:01.927511464 +0000 UTC m=+154.471849897" Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.956580 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nj4vs"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.977371 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc"] Feb 02 08:59:01 crc kubenswrapper[4747]: W0202 08:59:01.982454 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9552881_bebb_461b_b37a_3ce4f0a4de8f.slice/crio-a7ca0e3a4cac34cd5f2bcc2b5dbad0afcc9fc6b570ba8ed3e3bd73fd7f4af499 WatchSource:0}: Error finding container a7ca0e3a4cac34cd5f2bcc2b5dbad0afcc9fc6b570ba8ed3e3bd73fd7f4af499: Status 404 returned error can't find the container with id a7ca0e3a4cac34cd5f2bcc2b5dbad0afcc9fc6b570ba8ed3e3bd73fd7f4af499 Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.990585 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-z67kq"] Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.998867 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:01 crc kubenswrapper[4747]: E0202 08:59:01.999436 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.499421755 +0000 UTC m=+155.043760188 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:01 crc kubenswrapper[4747]: I0202 08:59:01.999447 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.003337 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 08:59:02 crc kubenswrapper[4747]: W0202 08:59:02.049166 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc59d9b6_8e68_4209_8e08_5a10b5a77620.slice/crio-1be95d64751a06ea9036a48412e9ee120b2090223feeecb99eeae4fc6c2d7bd2 WatchSource:0}: Error finding container 1be95d64751a06ea9036a48412e9ee120b2090223feeecb99eeae4fc6c2d7bd2: Status 404 returned error can't find the container with id 1be95d64751a06ea9036a48412e9ee120b2090223feeecb99eeae4fc6c2d7bd2 Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.064865 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.069742 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.100008 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.100665 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.600650987 +0000 UTC m=+155.144989420 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.108453 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.114253 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.120591 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q68ll"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.135853 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.158024 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-rcnhl" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.172548 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj"] Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.202799 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.224265 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.724248421 +0000 UTC m=+155.268586854 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.303346 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.303955 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.803927001 +0000 UTC m=+155.348265424 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.316008 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.322906 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:02 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:02 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:02 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.323013 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.378467 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-xm7wg" podStartSLOduration=129.378449319 podStartE2EDuration="2m9.378449319s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.308770545 +0000 UTC m=+154.853108968" watchObservedRunningTime="2026-02-02 08:59:02.378449319 +0000 UTC m=+154.922787752" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.378855 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-b9jzq" podStartSLOduration=130.37884961 
podStartE2EDuration="2m10.37884961s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.378424279 +0000 UTC m=+154.922762712" watchObservedRunningTime="2026-02-02 08:59:02.37884961 +0000 UTC m=+154.923188043" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.407228 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.407591 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:02.907579165 +0000 UTC m=+155.451917598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.416173 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-prs7q" podStartSLOduration=130.416153075 podStartE2EDuration="2m10.416153075s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.414209885 +0000 UTC m=+154.958548318" watchObservedRunningTime="2026-02-02 08:59:02.416153075 +0000 UTC m=+154.960491518" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.510860 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.511385 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.011364472 +0000 UTC m=+155.555702905 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.595424 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-swbbc" podStartSLOduration=130.595408754 podStartE2EDuration="2m10.595408754s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.491270418 +0000 UTC m=+155.035608871" watchObservedRunningTime="2026-02-02 08:59:02.595408754 +0000 UTC m=+155.139747187" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.634721 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.635529 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.135268585 +0000 UTC m=+155.679607028 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.671094 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" podStartSLOduration=130.671079642 podStartE2EDuration="2m10.671079642s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.604410195 +0000 UTC m=+155.148748628" watchObservedRunningTime="2026-02-02 08:59:02.671079642 +0000 UTC m=+155.215418075" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.738740 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.739001 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.23898784 +0000 UTC m=+155.783326273 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.756879 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" event={"ID":"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78","Type":"ContainerStarted","Data":"10f1c586f892709cbe20ad10c35b349e8d553a9c00efc1cf0dbc4db22eb86ed6"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.778062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" event={"ID":"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897","Type":"ContainerStarted","Data":"14869289f22c5c450f48894041b95d5668c39cc84f466f9da9c5313460717c95"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.800192 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" event={"ID":"e9552881-bebb-461b-b37a-3ce4f0a4de8f","Type":"ContainerStarted","Data":"a7ca0e3a4cac34cd5f2bcc2b5dbad0afcc9fc6b570ba8ed3e3bd73fd7f4af499"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.816501 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mvhcg" event={"ID":"0f91531c-29da-4a1a-869d-66ad2de9e055","Type":"ContainerStarted","Data":"f18d4fa5680447fa3ceadb901da52b4a66286105ad7087f03c5a598979c2f1f4"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.828856 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" event={"ID":"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5","Type":"ContainerStarted","Data":"0e56e4b670bbded0e2ff5ba4d52af9e3758e9da3749b90f2edda270495491d3c"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.834213 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" podStartSLOduration=129.834196938 podStartE2EDuration="2m9.834196938s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.833128571 +0000 UTC m=+155.377467004" watchObservedRunningTime="2026-02-02 08:59:02.834196938 +0000 UTC m=+155.378535371" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.837183 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gtm8b" podStartSLOduration=130.837172464 podStartE2EDuration="2m10.837172464s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.719085071 +0000 UTC m=+155.263423504" watchObservedRunningTime="2026-02-02 08:59:02.837172464 +0000 UTC m=+155.381510897" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.839924 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.840307 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.340295264 +0000 UTC m=+155.884633697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.850102 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" event={"ID":"212c87fc-6e91-409c-a18d-932af9399ac8","Type":"ContainerStarted","Data":"a2145b4be273057fa89259a1d134f83cdaff26f77e80da4a7b30d5b115ca9cc1"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.861316 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" event={"ID":"f52303ce-0d76-4bda-af76-9d78676487bd","Type":"ContainerStarted","Data":"71e6897a109f679fa5c37e4f4ee1757543b8ec4ef13c2da1e227901ae7cd3534"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.861670 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" event={"ID":"f52303ce-0d76-4bda-af76-9d78676487bd","Type":"ContainerStarted","Data":"4b9f633d98834f29fbcb0819cb86e8e98943efe9d5bd40408cc7f2835a793f45"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.868027 4747 generic.go:334] "Generic (PLEG): container finished" podID="f6fb2719-3a65-4654-b670-9999c570d564" containerID="1e3d28bdc15b1dabe13730304d8809df5bc8e8b5331a15f6dd8e242f5f928a92" exitCode=0 Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.868176 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" event={"ID":"f6fb2719-3a65-4654-b670-9999c570d564","Type":"ContainerDied","Data":"1e3d28bdc15b1dabe13730304d8809df5bc8e8b5331a15f6dd8e242f5f928a92"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.881651 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" event={"ID":"00093396-6c0c-432f-99e3-f1c059a3f83a","Type":"ContainerStarted","Data":"785e348a4f08aaef18e357ee21420d3e68cbd1598eea47c91afefa96661846f5"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.881724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" event={"ID":"00093396-6c0c-432f-99e3-f1c059a3f83a","Type":"ContainerStarted","Data":"cbe7f474de3a2f0cbdd6e52c8348bd0f8aead8b1c1a22fa19df7085eb9176268"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.888406 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-mvhcg" podStartSLOduration=5.888390295 podStartE2EDuration="5.888390295s" podCreationTimestamp="2026-02-02 08:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.858314865 +0000 UTC m=+155.402653308" watchObservedRunningTime="2026-02-02 08:59:02.888390295 +0000 UTC m=+155.432728728" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.907189 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-mlxbs" podStartSLOduration=130.907168356 podStartE2EDuration="2m10.907168356s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.887518803 +0000 UTC m=+155.431857236" watchObservedRunningTime="2026-02-02 08:59:02.907168356 +0000 UTC m=+155.451506789" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.925681 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-c2pfz" podStartSLOduration=130.92566157 podStartE2EDuration="2m10.92566157s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:02.92020868 +0000 UTC m=+155.464547123" watchObservedRunningTime="2026-02-02 08:59:02.92566157 +0000 UTC m=+155.470000003" Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.954053 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.954422 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.454379105 +0000 UTC m=+155.998717538 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.955832 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:02 crc kubenswrapper[4747]: E0202 08:59:02.957447 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.457429883 +0000 UTC m=+156.001768396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.971329 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" event={"ID":"2779189e-5fcd-4f4d-8123-f08906f94d42","Type":"ContainerStarted","Data":"d081f1ffe6133e879db89fbcf0d171b676b0b2c017225d3a0634891f65063047"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.985300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" event={"ID":"a7707651-619b-45a6-ab81-3b7416f4b6dd","Type":"ContainerStarted","Data":"af83fdf01053683a1a6f7b524d3d70d88cfcc3737988949aefe490b8f1fcaf48"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.985350 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" event={"ID":"a7707651-619b-45a6-ab81-3b7416f4b6dd","Type":"ContainerStarted","Data":"384aa2cf5e02b02372cf0b56ed43a97ead012808b886617263cc001c2fdc2d09"} Feb 02 08:59:02 crc kubenswrapper[4747]: I0202 08:59:02.986678 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:02.996732 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:02.996771 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:02.997103 4747 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-l8xs2 container/olm-operator namespace/openshift-operator-lifecycle-manager: 
Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:02.997143 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" podUID="a7707651-619b-45a6-ab81-3b7416f4b6dd" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.005535 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-g88c2" event={"ID":"4d446bb5-3d8d-4a2f-8e86-68630e466d82","Type":"ContainerStarted","Data":"44939389c190e6a22b588436b11ca6337fb1ef063399ac833b195fbe0899fe3f"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.005581 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-g88c2" event={"ID":"4d446bb5-3d8d-4a2f-8e86-68630e466d82","Type":"ContainerStarted","Data":"8cf7a2f77c868e02e4c8bd250c68880f53d25d42149307e25598d54a4b2b3fac"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.010670 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" event={"ID":"6aefeb9f-c93c-4943-82be-7c02f9366c65","Type":"ContainerStarted","Data":"8f5469d79faf0641d20aa1ca92a4413f5193bc9cff90d36540808ce347979f67"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.010723 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" event={"ID":"6aefeb9f-c93c-4943-82be-7c02f9366c65","Type":"ContainerStarted","Data":"059af70bdc62ce56c6884cbf9b7edafd073be2946d0007d934f265e5d70650c6"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.025875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" event={"ID":"17e230d9-1bcc-4715-bb34-d36181eda723","Type":"ContainerStarted","Data":"e4200564614e9958915413b6fdb3e2e9848fe41da02fadd18147bf36d6c8bb40"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.040153 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.050746 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" podStartSLOduration=130.050728851 podStartE2EDuration="2m10.050728851s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.004138618 +0000 UTC m=+155.548477051" watchObservedRunningTime="2026-02-02 08:59:03.050728851 +0000 UTC m=+155.595067284" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.051595 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" podStartSLOduration=130.051586833 podStartE2EDuration="2m10.051586833s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.050007122 +0000 UTC m=+155.594345585" 
watchObservedRunningTime="2026-02-02 08:59:03.051586833 +0000 UTC m=+155.595925266" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.052789 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.055975 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.056317 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.058203 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.558181282 +0000 UTC m=+156.102519715 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.072600 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" event={"ID":"42f4cc75-06b6-48f7-95cb-915be0b67e72","Type":"ContainerStarted","Data":"d2c619408f27133ac2f9132f6678f1c88f1988fbf904863994aee9b4cfaa4250"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.073438 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.126096 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pw275 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.126365 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.126498 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" event={"ID":"4f791e66-ece7-47e4-a9bb-71c940fc336d","Type":"ContainerStarted","Data":"0aa2a9a938d1b4a940b6368c9fa55df7cbe8963e0d7a47499a3ae2b286e5717b"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.141875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hrg7n" 
event={"ID":"a28be6b3-8255-4ce0-be32-66f19733171a","Type":"ContainerStarted","Data":"f6e2aaee6bb476a602a7f51360253e0710caff5b2317307190f3d24a53f419f5"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.141922 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hrg7n" event={"ID":"a28be6b3-8255-4ce0-be32-66f19733171a","Type":"ContainerStarted","Data":"101bd8c6faf46b3c8be39bfe845d2fadf4d55236fcbdecf6a239595c7686e525"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.147537 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-g88c2" podStartSLOduration=6.147518749 podStartE2EDuration="6.147518749s" podCreationTimestamp="2026-02-02 08:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.098918535 +0000 UTC m=+155.643256968" watchObservedRunningTime="2026-02-02 08:59:03.147518749 +0000 UTC m=+155.691857172" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.154179 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" event={"ID":"225573d2-7c3a-4c04-b1d4-9971e2145c1e","Type":"ContainerStarted","Data":"787fd53cbda4cea01742c44dfbba86db28b0bc50de41065193aa9fee93a0815b"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.161894 4747 patch_prober.go:28] interesting pod/apiserver-76f77b778f-g96zn container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]log ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]etcd ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/generic-apiserver-start-informers ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/max-in-flight-filter ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/image.openshift.io-apiserver-caches ok Feb 02 08:59:03 crc kubenswrapper[4747]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 02 08:59:03 crc kubenswrapper[4747]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/project.openshift.io-projectcache ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-startinformers ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 02 08:59:03 crc kubenswrapper[4747]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 02 08:59:03 crc kubenswrapper[4747]: livez check failed Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.161970 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" podUID="6cfa55e8-7919-4d29-8b2c-3e9dc46dc3df" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.188816 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.190868 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.690854618 +0000 UTC m=+156.235193051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.202432 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podStartSLOduration=130.202412354 podStartE2EDuration="2m10.202412354s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.192592993 +0000 UTC m=+155.736931426" watchObservedRunningTime="2026-02-02 08:59:03.202412354 +0000 UTC m=+155.746750787" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.215484 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" event={"ID":"f8194841-4b1b-4477-8147-cea5ca9d791a","Type":"ContainerStarted","Data":"7d5d3d528aa4ad0fe583816eb56531240ee8233401372a4e4627f8c18e818c96"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.215487 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" podStartSLOduration=130.215470789 podStartE2EDuration="2m10.215470789s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.212745439 +0000 UTC m=+155.757083892" watchObservedRunningTime="2026-02-02 08:59:03.215470789 +0000 UTC m=+155.759809252" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.256786 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-88hn7" podStartSLOduration=131.256769746 podStartE2EDuration="2m11.256769746s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.254695723 +0000 UTC m=+155.799034146" watchObservedRunningTime="2026-02-02 08:59:03.256769746 +0000 UTC m=+155.801108179" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.274331 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" 
event={"ID":"f05f69ab-11cf-44f5-84b7-d9387c51e162","Type":"ContainerStarted","Data":"bff557d42a9083efbb856b1a54473685d2f97f11c523ebb7f348fcdc926e8d55"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.329098 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" event={"ID":"d91844f8-e11c-4a91-86ae-be01d3d901fe","Type":"ContainerStarted","Data":"1f9d9574ff027eb138ac342b2e917c64efcc81cab83a0552a1a9e0f771e86b95"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.329143 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" event={"ID":"d91844f8-e11c-4a91-86ae-be01d3d901fe","Type":"ContainerStarted","Data":"d3ee0164872986366c709ba292f34712d14c4f0006ee10e81751c835302ea292"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.340818 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.341284 4747 csr.go:261] certificate signing request csr-r4wbc is approved, waiting to be issued Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.341364 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:03 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:03 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:03 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.341437 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.341953 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.841919976 +0000 UTC m=+156.386258409 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.369328 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" podStartSLOduration=130.369310268 podStartE2EDuration="2m10.369310268s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.322361195 +0000 UTC m=+155.866699628" watchObservedRunningTime="2026-02-02 08:59:03.369310268 +0000 UTC m=+155.913648701" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.371904 4747 csr.go:257] certificate signing request csr-r4wbc is issued Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.376294 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" event={"ID":"cc59d9b6-8e68-4209-8e08-5a10b5a77620","Type":"ContainerStarted","Data":"1be95d64751a06ea9036a48412e9ee120b2090223feeecb99eeae4fc6c2d7bd2"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.383000 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" podStartSLOduration=131.382965747 podStartE2EDuration="2m11.382965747s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.373469454 +0000 UTC m=+155.917807887" watchObservedRunningTime="2026-02-02 08:59:03.382965747 +0000 UTC m=+155.927304190" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.423904 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" event={"ID":"8efa0848-1d58-46b7-896e-83503c62e712","Type":"ContainerStarted","Data":"b3b8d711f9639c5eb279b411ad6d771498c15cf022d3340800c8f1b45af0f3de"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.423990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" event={"ID":"8efa0848-1d58-46b7-896e-83503c62e712","Type":"ContainerStarted","Data":"82a96b6ec10a98c36c3980758408301d5ce932dfe6f4ebf7747e501a8aa7e399"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.434750 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" event={"ID":"2d11e316-3b77-4c43-ba00-4bd026183952","Type":"ContainerStarted","Data":"58782679c455e53f3559b20f925470e193838762a1ac8f6fee95898d6cb9e5d8"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.444969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: 
\"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.445348 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:03.945331614 +0000 UTC m=+156.489670047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.475077 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" event={"ID":"49c2f80b-4c99-4b71-b200-c5148ca07dce","Type":"ContainerStarted","Data":"46f6400acea4f8c4428f5bf522bb8ca15e58e323ace329f8b103a189625e1422"} Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.496387 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.514617 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qvchd" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.516496 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8v24r" podStartSLOduration=130.516477755 podStartE2EDuration="2m10.516477755s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.463334985 +0000 UTC m=+156.007673438" watchObservedRunningTime="2026-02-02 08:59:03.516477755 +0000 UTC m=+156.060816188" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.546705 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.548181 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.048166017 +0000 UTC m=+156.592504450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.559426 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-967q2" podStartSLOduration=131.559409175 podStartE2EDuration="2m11.559409175s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:03.517594414 +0000 UTC m=+156.061932847" watchObservedRunningTime="2026-02-02 08:59:03.559409175 +0000 UTC m=+156.103747608" Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.648631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.651368 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.151351639 +0000 UTC m=+156.695690172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.750268 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.750390 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.250365204 +0000 UTC m=+156.794703637 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.750506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.750783 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.250772084 +0000 UTC m=+156.795110507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.860641 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.860788 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.36076506 +0000 UTC m=+156.905103493 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.860970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.861285 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.361276813 +0000 UTC m=+156.905615246 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.961674 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.961859 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.461833988 +0000 UTC m=+157.006172411 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:03 crc kubenswrapper[4747]: I0202 08:59:03.961992 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:03 crc kubenswrapper[4747]: E0202 08:59:03.962286 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.462272669 +0000 UTC m=+157.006611102 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.063416 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.063618 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.563590293 +0000 UTC m=+157.107928726 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.063852 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.064213 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.564203289 +0000 UTC m=+157.108541722 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.165366 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.165583 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.665555324 +0000 UTC m=+157.209893757 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.165740 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.166112 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.666102958 +0000 UTC m=+157.210441391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.266665 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.267029 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.767013971 +0000 UTC m=+157.311352404 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.317866 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:04 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:04 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:04 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.317948 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.367776 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.368224 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.868202732 +0000 UTC m=+157.412541255 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.373219 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-02 08:54:03 +0000 UTC, rotation deadline is 2026-10-18 04:55:41.630240036 +0000 UTC Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.373259 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6187h56m37.256983434s for next certificate rotation Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.468801 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.469039 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.969000373 +0000 UTC m=+157.513338806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.469131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.469425 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:04.969411143 +0000 UTC m=+157.513749576 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.491493 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" event={"ID":"17e230d9-1bcc-4715-bb34-d36181eda723","Type":"ContainerStarted","Data":"f96f5fe6f5a46f08989feb4aef853f222e6dcd7f5b856a79a86428863c291d27"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.491540 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" event={"ID":"17e230d9-1bcc-4715-bb34-d36181eda723","Type":"ContainerStarted","Data":"748815de81a2028acf9d0400ba2deea1a51f1b2d8794ffc6b1161606d8ffebaa"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.492355 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.493907 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hrg7n" event={"ID":"a28be6b3-8255-4ce0-be32-66f19733171a","Type":"ContainerStarted","Data":"b2f0b672f65815b3ea526d69bff8c830b49b6053d478de4e3d6c4eb308f4d85b"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.494348 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-hrg7n" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.498646 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" event={"ID":"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897","Type":"ContainerStarted","Data":"f716212e73570a152cfef35289f2c3091a7cadb2197a2d31ee30bd997746f0f7"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.498688 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" event={"ID":"e96fc5c0-9b3a-46ae-a43b-8a5effe1d897","Type":"ContainerStarted","Data":"15ee1ede525d1a4e990da011370b79b67380e32ff36cc71785714c2ddc92ac11"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.500017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9lzj" event={"ID":"f05f69ab-11cf-44f5-84b7-d9387c51e162","Type":"ContainerStarted","Data":"fad9b2a259346a7fad32bd4883484535aae3e6aa9c3b076b5ee36e2d778cedc0"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.518415 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" event={"ID":"6aefeb9f-c93c-4943-82be-7c02f9366c65","Type":"ContainerStarted","Data":"234edfcfb7a89f691823a77fa0fcd5afd65920f0559d495064abab9a05ec65b2"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.524601 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" 
event={"ID":"f6fb2719-3a65-4654-b670-9999c570d564","Type":"ContainerStarted","Data":"e63ed983bfaa1afbcce7e5689e923f5a300f9fa51852bb9ded05cd72ad7f6ece"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.524716 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.532590 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" podStartSLOduration=131.53256735 podStartE2EDuration="2m11.53256735s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.524894974 +0000 UTC m=+157.069233407" watchObservedRunningTime="2026-02-02 08:59:04.53256735 +0000 UTC m=+157.076905773" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.536729 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" event={"ID":"cc59d9b6-8e68-4209-8e08-5a10b5a77620","Type":"ContainerStarted","Data":"6a1d9db407fff57c1b9330e93f8fab5307b6bda600ebd73e583a39c5feb8441d"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.536953 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.551870 4747 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-4zlqc container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.551918 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" podUID="cc59d9b6-8e68-4209-8e08-5a10b5a77620" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.556456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-tmxxh" event={"ID":"4f791e66-ece7-47e4-a9bb-71c940fc336d","Type":"ContainerStarted","Data":"b72a190ccc0105ff30126eb4469a47c09536f65cf44b2976187353a459bf6850"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.563447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mb7jj" event={"ID":"2779189e-5fcd-4f4d-8123-f08906f94d42","Type":"ContainerStarted","Data":"78104c9cac4678cd59128c2ba7821ab1c30d2f77d5998caa496ebcd65ceb9a1c"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.565745 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" event={"ID":"2d11e316-3b77-4c43-ba00-4bd026183952","Type":"ContainerStarted","Data":"a781a9dec1091a1d745dd5c35b43fe2a42c052bac0b439c81304e23c09e75ae7"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.565782 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" 
event={"ID":"2d11e316-3b77-4c43-ba00-4bd026183952","Type":"ContainerStarted","Data":"f44f6563d4dd7fb3a313f901a7714bb3dbc81e2a94ade9036491a2be7d4a96de"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.569586 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.569796 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" event={"ID":"5e4cf43d-a3b1-444a-b68a-9d59f3f45a78","Type":"ContainerStarted","Data":"3806093ad6dfc8d4edf37a92f1824f3adfe8e3578f04a209fc33b12cbb85be70"} Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.569965 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.069947857 +0000 UTC m=+157.614286290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.581397 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-gr5sp" podStartSLOduration=131.58137791 podStartE2EDuration="2m11.58137791s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.57355617 +0000 UTC m=+157.117894603" watchObservedRunningTime="2026-02-02 08:59:04.58137791 +0000 UTC m=+157.125716393" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.597159 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" event={"ID":"42f4cc75-06b6-48f7-95cb-915be0b67e72","Type":"ContainerStarted","Data":"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.598603 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pw275 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.598648 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.601594 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" event={"ID":"f52303ce-0d76-4bda-af76-9d78676487bd","Type":"ContainerStarted","Data":"6f1bd799d4432704cadaac3d12117340787c0653e562e0be17d25a8e8d5f29d5"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.620281 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nj4vs" event={"ID":"e9552881-bebb-461b-b37a-3ce4f0a4de8f","Type":"ContainerStarted","Data":"2824f451c1f155babc31b2c87aa4b5ae474e508c55141793f60b0887797a2c6f"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.632606 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" event={"ID":"f8194841-4b1b-4477-8147-cea5ca9d791a","Type":"ContainerStarted","Data":"81c8c33a6b1b1a98dd14aaa24e80c0f1bcec3e9060e1b15ea1673efe2cc4112e"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.639126 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" podStartSLOduration=132.639110948 podStartE2EDuration="2m12.639110948s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.620499582 +0000 UTC m=+157.164838015" watchObservedRunningTime="2026-02-02 08:59:04.639110948 +0000 UTC m=+157.183449381" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.641287 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" event={"ID":"21ffc960-a1b7-45cd-a82a-9e2bfb7ab7d5","Type":"ContainerStarted","Data":"e7c0d548ffd577221134bc7e2ce0f2ba1aba81f3d5fe1c147d5b5e5e483043f2"} Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.660516 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l8xs2" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.672077 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.674792 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.174773881 +0000 UTC m=+157.719112384 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.682599 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7vx8p" podStartSLOduration=132.682581331 podStartE2EDuration="2m12.682581331s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.64033738 +0000 UTC m=+157.184675823" watchObservedRunningTime="2026-02-02 08:59:04.682581331 +0000 UTC m=+157.226919764" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.716442 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-hrg7n" podStartSLOduration=7.716421178 podStartE2EDuration="7.716421178s" podCreationTimestamp="2026-02-02 08:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.685079005 +0000 UTC m=+157.229417438" watchObservedRunningTime="2026-02-02 08:59:04.716421178 +0000 UTC m=+157.260759611" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.717097 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" podStartSLOduration=131.717092905 podStartE2EDuration="2m11.717092905s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.715849003 +0000 UTC m=+157.260187436" watchObservedRunningTime="2026-02-02 08:59:04.717092905 +0000 UTC m=+157.261431338" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.759704 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" podStartSLOduration=131.759685625 podStartE2EDuration="2m11.759685625s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.758210798 +0000 UTC m=+157.302549231" watchObservedRunningTime="2026-02-02 08:59:04.759685625 +0000 UTC m=+157.304024058" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.773387 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.774745 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 08:59:05.27471075 +0000 UTC m=+157.819049183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.796751 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-q68ll" podStartSLOduration=131.796732214 podStartE2EDuration="2m11.796732214s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.793550912 +0000 UTC m=+157.337889335" watchObservedRunningTime="2026-02-02 08:59:04.796732214 +0000 UTC m=+157.341070647" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.856335 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9zx7z" podStartSLOduration=132.856316229 podStartE2EDuration="2m12.856316229s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.855392006 +0000 UTC m=+157.399730449" watchObservedRunningTime="2026-02-02 08:59:04.856316229 +0000 UTC m=+157.400654672" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.877857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.878402 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.378386974 +0000 UTC m=+157.922725407 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.892099 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tppnd" podStartSLOduration=131.892076165 podStartE2EDuration="2m11.892076165s" podCreationTimestamp="2026-02-02 08:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:04.891535691 +0000 UTC m=+157.435874134" watchObservedRunningTime="2026-02-02 08:59:04.892076165 +0000 UTC m=+157.436414608" Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.979060 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.979287 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.479242557 +0000 UTC m=+158.023580990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:04 crc kubenswrapper[4747]: I0202 08:59:04.979541 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:04 crc kubenswrapper[4747]: E0202 08:59:04.979865 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.479849122 +0000 UTC m=+158.024187555 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.081058 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.081261 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.581229628 +0000 UTC m=+158.125568061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.081327 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.081684 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.581674439 +0000 UTC m=+158.126012872 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.181964 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.182441 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.682403918 +0000 UTC m=+158.226742361 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.284408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.284792 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.784772329 +0000 UTC m=+158.329110762 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.320619 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:05 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:05 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:05 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.320688 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.385748 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.385879 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.885857177 +0000 UTC m=+158.430195610 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.386013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.386353 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.886344879 +0000 UTC m=+158.430683312 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.487168 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.487799 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:05.987778117 +0000 UTC m=+158.532116570 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.589753 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.590129 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.090109696 +0000 UTC m=+158.634448209 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.648713 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" event={"ID":"f8194841-4b1b-4477-8147-cea5ca9d791a","Type":"ContainerStarted","Data":"619994f74146475205b1b949249af9902c7f9e394fa393a800a8e8d234b23e3d"} Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.653029 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pw275 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.665365 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.665475 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.690825 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.691356 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.191335328 +0000 UTC m=+158.735673761 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.702041 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zlqc" Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.792253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.795389 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.295370572 +0000 UTC m=+158.839708995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.894598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.894785 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.394758766 +0000 UTC m=+158.939097199 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.894922 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.895203 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.395195277 +0000 UTC m=+158.939533710 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:05 crc kubenswrapper[4747]: I0202 08:59:05.996087 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:05 crc kubenswrapper[4747]: E0202 08:59:05.996312 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.496281856 +0000 UTC m=+159.040620289 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.097861 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.098393 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.598378739 +0000 UTC m=+159.142717172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.164986 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t9rvz" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.199159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.199333 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.699308024 +0000 UTC m=+159.243646457 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.199387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.199691 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.699684163 +0000 UTC m=+159.244022596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.300907 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.301027 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.801009298 +0000 UTC m=+159.345347731 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.301372 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.301645 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.801637104 +0000 UTC m=+159.345975537 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.317588 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:06 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:06 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:06 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.317637 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.402740 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.403041 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.903012229 +0000 UTC m=+159.447350662 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.403170 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: E0202 08:59:06.403459 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 08:59:06.90345144 +0000 UTC m=+159.447789943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-25ddd" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.473706 4747 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.497009 4747 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-02T08:59:06.47374296Z","Handler":null,"Name":""} Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.501797 4747 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.501850 4747 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.504699 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.510462 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.606556 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.609384 4747 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.609423 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.632185 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-25ddd\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.656759 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" event={"ID":"f8194841-4b1b-4477-8147-cea5ca9d791a","Type":"ContainerStarted","Data":"80f0ef950716cf70ca78a33e14450cba0c5e0cf7d476524165e1ee1b7d36f641"} Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.656841 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" event={"ID":"f8194841-4b1b-4477-8147-cea5ca9d791a","Type":"ContainerStarted","Data":"57f0115233f3374ef77942177e4a51838b35bcc5dd7c1c2a3b35f0ff33b161c3"} Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.677260 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-z67kq" podStartSLOduration=9.677241089 podStartE2EDuration="9.677241089s" podCreationTimestamp="2026-02-02 08:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:06.677189168 +0000 UTC m=+159.221527621" watchObservedRunningTime="2026-02-02 08:59:06.677241089 +0000 UTC m=+159.221579532" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.828275 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.942335 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.943601 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.947324 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 08:59:06 crc kubenswrapper[4747]: I0202 08:59:06.954244 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.011678 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4kzn\" (UniqueName: \"kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.011745 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.011796 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.026800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 08:59:07 crc kubenswrapper[4747]: W0202 08:59:07.034893 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod943f883b_9752_44f0_b3ba_845f53d4b86e.slice/crio-86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868 WatchSource:0}: Error finding container 86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868: Status 404 returned error can't find the container with id 86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868 Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.113315 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.113415 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4kzn\" (UniqueName: \"kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.113450 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " 
pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.114290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.114308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.145022 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.149681 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.151232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4kzn\" (UniqueName: \"kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn\") pod \"community-operators-bmlc4\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.153905 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.155650 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.214791 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v569z\" (UniqueName: \"kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.215103 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.215401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.271427 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.316332 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v569z\" (UniqueName: \"kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.316755 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.316829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.317435 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.317470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.319465 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:07 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:07 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:07 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.319739 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.336056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v569z\" (UniqueName: \"kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z\") pod \"certified-operators-dpz98\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.341401 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.342854 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.357152 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.418320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb5lh\" (UniqueName: \"kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.418389 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.418430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.486471 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 08:59:07 crc kubenswrapper[4747]: W0202 08:59:07.493279 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cdcfe28_8ae4_4938_8d64_d8255b92cf90.slice/crio-ac369f432dda821d974d7ba0d584819faf5dcd9ca30b16da771469182c883986 WatchSource:0}: Error finding container ac369f432dda821d974d7ba0d584819faf5dcd9ca30b16da771469182c883986: Status 404 returned error can't find the container with id ac369f432dda821d974d7ba0d584819faf5dcd9ca30b16da771469182c883986 Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.494090 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.521226 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb5lh\" (UniqueName: \"kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.521303 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.521350 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.522339 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.522753 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.537375 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.538765 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.546203 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb5lh\" (UniqueName: \"kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh\") pod \"community-operators-pzgvn\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.549042 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.622975 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.623052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.623113 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz9sn\" (UniqueName: \"kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.666331 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" event={"ID":"943f883b-9752-44f0-b3ba-845f53d4b86e","Type":"ContainerStarted","Data":"56e6a5bc39fc32e9958b5ffa7f60bc829c13b6f09dc244260a8ed9d10ec3b0a3"} Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.666370 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" event={"ID":"943f883b-9752-44f0-b3ba-845f53d4b86e","Type":"ContainerStarted","Data":"86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868"} Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.667119 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.669641 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerStarted","Data":"c50f83d321194b14df33998a3f1fc355f885242cbcbbab4a20337bb1bb53af5d"} Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.669667 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerStarted","Data":"ac369f432dda821d974d7ba0d584819faf5dcd9ca30b16da771469182c883986"} Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.671224 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.683007 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.709995 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" podStartSLOduration=135.70997744 podStartE2EDuration="2m15.70997744s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:07.689920437 +0000 UTC m=+160.234258880" watchObservedRunningTime="2026-02-02 08:59:07.70997744 +0000 UTC m=+160.254315873" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.725992 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.726159 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.726310 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz9sn\" (UniqueName: \"kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.726586 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.726674 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.748762 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz9sn\" (UniqueName: \"kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn\") pod \"certified-operators-wspvt\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: W0202 08:59:07.750013 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87ba3296_51e6_4641_acbc_e24b60ffe91c.slice/crio-51abeb0e0409ff9aef304fa69914309d28c06183f6d84d04c5518ceead739a22 WatchSource:0}: Error finding container 
51abeb0e0409ff9aef304fa69914309d28c06183f6d84d04c5518ceead739a22: Status 404 returned error can't find the container with id 51abeb0e0409ff9aef304fa69914309d28c06183f6d84d04c5518ceead739a22 Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.854150 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.859435 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.880197 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.881113 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.884125 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.886866 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 02 08:59:07 crc kubenswrapper[4747]: I0202 08:59:07.887018 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.033884 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.033995 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.043048 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.059479 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-g96zn" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.073353 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.134751 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.134833 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir\") pod \"revision-pruner-9-crc\" 
(UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.134926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.136141 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.136195 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-m55qb" podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.136702 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.136734 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m55qb" podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.161775 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.204556 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.327043 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:08 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Feb 02 08:59:08 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:08 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.327407 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.363492 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.488801 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 08:59:08 crc kubenswrapper[4747]: W0202 08:59:08.498066 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod19961133_de77_49b7_80bc_46a38ab3c5ce.slice/crio-3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908 WatchSource:0}: Error finding container 3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908: Status 404 returned error can't find the container with id 3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.676918 4747 generic.go:334] "Generic (PLEG): container finished" podID="7935ede6-8dc0-421f-9296-fbd017061975" containerID="61aeaa3d0716225c85f6ea227e5b6fb037196627335ac9e811501583e4e47972" exitCode=0 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.677062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerDied","Data":"61aeaa3d0716225c85f6ea227e5b6fb037196627335ac9e811501583e4e47972"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.677108 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerStarted","Data":"63328b0f03c5d4e55e52c832fe104add0d2f73ed120e3ce4487366ac61219464"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.680262 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.680753 4747 generic.go:334] "Generic (PLEG): container finished" podID="d91844f8-e11c-4a91-86ae-be01d3d901fe" containerID="1f9d9574ff027eb138ac342b2e917c64efcc81cab83a0552a1a9e0f771e86b95" exitCode=0 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.680842 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" event={"ID":"d91844f8-e11c-4a91-86ae-be01d3d901fe","Type":"ContainerDied","Data":"1f9d9574ff027eb138ac342b2e917c64efcc81cab83a0552a1a9e0f771e86b95"} Feb 02 08:59:08 crc 
kubenswrapper[4747]: I0202 08:59:08.685410 4747 generic.go:334] "Generic (PLEG): container finished" podID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerID="8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf" exitCode=0 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.685483 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerDied","Data":"8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.686109 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerStarted","Data":"d72af6d6b145d56377a65b0862ac16d0579821ab4b6556111dce4742805b790c"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.689721 4747 generic.go:334] "Generic (PLEG): container finished" podID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerID="8104835b4258d61d7dcc0b06b3453714df76b3eadaf4822ab2d16f35832ad542" exitCode=0 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.689803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerDied","Data":"8104835b4258d61d7dcc0b06b3453714df76b3eadaf4822ab2d16f35832ad542"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.689827 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerStarted","Data":"51abeb0e0409ff9aef304fa69914309d28c06183f6d84d04c5518ceead739a22"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.695172 4747 generic.go:334] "Generic (PLEG): container finished" podID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerID="c50f83d321194b14df33998a3f1fc355f885242cbcbbab4a20337bb1bb53af5d" exitCode=0 Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.695308 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerDied","Data":"c50f83d321194b14df33998a3f1fc355f885242cbcbbab4a20337bb1bb53af5d"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.698554 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19961133-de77-49b7-80bc-46a38ab3c5ce","Type":"ContainerStarted","Data":"3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908"} Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.923106 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7gp42" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.940066 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.941869 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.950293 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.970863 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.983799 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.983865 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.985249 4747 patch_prober.go:28] interesting pod/console-f9d7485db-s6zqr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Feb 02 08:59:08 crc kubenswrapper[4747]: I0202 08:59:08.985382 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-s6zqr" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.048345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.048449 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.048598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxb2c\" (UniqueName: \"kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.150290 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxb2c\" (UniqueName: \"kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.150344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc 
kubenswrapper[4747]: I0202 08:59:09.150381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.150843 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.150924 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.168340 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxb2c\" (UniqueName: \"kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c\") pod \"redhat-marketplace-5z2cn\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.269746 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.319529 4747 patch_prober.go:28] interesting pod/router-default-5444994796-prs7q container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 08:59:09 crc kubenswrapper[4747]: [+]has-synced ok Feb 02 08:59:09 crc kubenswrapper[4747]: [+]process-running ok Feb 02 08:59:09 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.319898 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-prs7q" podUID="5a855422-3b25-4bdd-9df5-10a39f02225e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.342003 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.343198 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.356156 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.457128 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.457177 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.457242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlcvn\" (UniqueName: \"kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.558585 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlcvn\" (UniqueName: \"kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.558673 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.558703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.559250 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.559484 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.580421 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tlcvn\" (UniqueName: \"kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn\") pod \"redhat-marketplace-9z22h\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.669526 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 08:59:09 crc kubenswrapper[4747]: W0202 08:59:09.676773 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52602e18_9a62_4ee3_bfa3_530eb601caa9.slice/crio-4a56f6dfc2b08619d58446b67998cc8e596f2108e610c38296c10836c0a03fbf WatchSource:0}: Error finding container 4a56f6dfc2b08619d58446b67998cc8e596f2108e610c38296c10836c0a03fbf: Status 404 returned error can't find the container with id 4a56f6dfc2b08619d58446b67998cc8e596f2108e610c38296c10836c0a03fbf Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.704129 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19961133-de77-49b7-80bc-46a38ab3c5ce","Type":"ContainerStarted","Data":"3a459c1068ac303b8fb59a61f64bb4777ee10a121b33cba8b7c99777a7fad9ed"} Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.707204 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerStarted","Data":"4a56f6dfc2b08619d58446b67998cc8e596f2108e610c38296c10836c0a03fbf"} Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.718342 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.71832285 podStartE2EDuration="2.71832285s" podCreationTimestamp="2026-02-02 08:59:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:09.71713148 +0000 UTC m=+162.261469913" watchObservedRunningTime="2026-02-02 08:59:09.71832285 +0000 UTC m=+162.262661283" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.733889 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.911210 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.963719 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz6ll\" (UniqueName: \"kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll\") pod \"d91844f8-e11c-4a91-86ae-be01d3d901fe\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.963851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume\") pod \"d91844f8-e11c-4a91-86ae-be01d3d901fe\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.963879 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume\") pod \"d91844f8-e11c-4a91-86ae-be01d3d901fe\" (UID: \"d91844f8-e11c-4a91-86ae-be01d3d901fe\") " Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.965083 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume" (OuterVolumeSpecName: "config-volume") pod "d91844f8-e11c-4a91-86ae-be01d3d901fe" (UID: "d91844f8-e11c-4a91-86ae-be01d3d901fe"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.968499 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll" (OuterVolumeSpecName: "kube-api-access-tz6ll") pod "d91844f8-e11c-4a91-86ae-be01d3d901fe" (UID: "d91844f8-e11c-4a91-86ae-be01d3d901fe"). InnerVolumeSpecName "kube-api-access-tz6ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:09 crc kubenswrapper[4747]: I0202 08:59:09.971552 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d91844f8-e11c-4a91-86ae-be01d3d901fe" (UID: "d91844f8-e11c-4a91-86ae-be01d3d901fe"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.065718 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d91844f8-e11c-4a91-86ae-be01d3d901fe-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.065757 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d91844f8-e11c-4a91-86ae-be01d3d901fe-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.065771 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz6ll\" (UniqueName: \"kubernetes.io/projected/d91844f8-e11c-4a91-86ae-be01d3d901fe-kube-api-access-tz6ll\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.149121 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 08:59:10 crc kubenswrapper[4747]: E0202 08:59:10.149361 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91844f8-e11c-4a91-86ae-be01d3d901fe" containerName="collect-profiles" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.149374 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91844f8-e11c-4a91-86ae-be01d3d901fe" containerName="collect-profiles" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.149487 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91844f8-e11c-4a91-86ae-be01d3d901fe" containerName="collect-profiles" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.150279 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.153748 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.155397 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.164179 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.272463 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.272509 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sswdn\" (UniqueName: \"kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.272554 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " 
pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.316085 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.323729 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.360491 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.373614 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.373660 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sswdn\" (UniqueName: \"kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.373725 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.374754 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.374893 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.421176 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sswdn\" (UniqueName: \"kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn\") pod \"redhat-operators-fpzd5\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.487646 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.544542 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.545762 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.558667 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.676852 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq2nn\" (UniqueName: \"kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.677266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.677307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.712848 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" event={"ID":"d91844f8-e11c-4a91-86ae-be01d3d901fe","Type":"ContainerDied","Data":"d3ee0164872986366c709ba292f34712d14c4f0006ee10e81751c835302ea292"} Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.712888 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3ee0164872986366c709ba292f34712d14c4f0006ee10e81751c835302ea292" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.712900 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.716640 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerStarted","Data":"1d9952843a957fa43eaa916e2a1dc5bab2c1cdd954c9d851c403b1177ec9c0d7"} Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.722476 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-prs7q" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.736188 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 08:59:10 crc kubenswrapper[4747]: W0202 08:59:10.748506 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod557f0623_5fe6_48cd_a958_88330d792ba8.slice/crio-301b44488a09d51ab18526215f5968b577b2d89c67a08204c0f66aedb44b2d74 WatchSource:0}: Error finding container 301b44488a09d51ab18526215f5968b577b2d89c67a08204c0f66aedb44b2d74: Status 404 returned error can't find the container with id 301b44488a09d51ab18526215f5968b577b2d89c67a08204c0f66aedb44b2d74 Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.778719 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq2nn\" (UniqueName: \"kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.778780 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.778830 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.779981 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.780288 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities\") pod \"redhat-operators-p8jt9\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.809225 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq2nn\" (UniqueName: \"kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn\") pod \"redhat-operators-p8jt9\" (UID: 
\"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.858415 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.859333 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.861382 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.861389 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.861691 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.937577 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.981809 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:10 crc kubenswrapper[4747]: I0202 08:59:10.981992 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.082960 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.083392 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.083068 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.160190 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.218568 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.248370 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.512824 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 08:59:11 crc kubenswrapper[4747]: W0202 08:59:11.523950 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode6905ab8_61cf_4269_adc3_68ebdda1bf73.slice/crio-c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87 WatchSource:0}: Error finding container c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87: Status 404 returned error can't find the container with id c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87 Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.724291 4747 generic.go:334] "Generic (PLEG): container finished" podID="19961133-de77-49b7-80bc-46a38ab3c5ce" containerID="3a459c1068ac303b8fb59a61f64bb4777ee10a121b33cba8b7c99777a7fad9ed" exitCode=0 Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.724383 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19961133-de77-49b7-80bc-46a38ab3c5ce","Type":"ContainerDied","Data":"3a459c1068ac303b8fb59a61f64bb4777ee10a121b33cba8b7c99777a7fad9ed"} Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.726148 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerStarted","Data":"a895938f8cb8b65e4a8cc5d4c218adcd82a1502a517361f29b4ccc8ccd40ecee"} Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.728033 4747 generic.go:334] "Generic (PLEG): container finished" podID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerID="87af9c722640eb65a3f8c932ee5aad89d4e2246ca747a004f23a0de6eed8f8fd" exitCode=0 Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.728104 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerDied","Data":"87af9c722640eb65a3f8c932ee5aad89d4e2246ca747a004f23a0de6eed8f8fd"} Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.729224 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6905ab8-61cf-4269-adc3-68ebdda1bf73","Type":"ContainerStarted","Data":"c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87"} Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.730555 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerStarted","Data":"301b44488a09d51ab18526215f5968b577b2d89c67a08204c0f66aedb44b2d74"} Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.732349 4747 generic.go:334] "Generic (PLEG): container finished" podID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerID="9c8f68331ee732c6da8940865a683a2402d95c9a52097c2a8e819eced4a64fba" exitCode=0 Feb 02 08:59:11 crc kubenswrapper[4747]: I0202 08:59:11.733044 4747 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerDied","Data":"9c8f68331ee732c6da8940865a683a2402d95c9a52097c2a8e819eced4a64fba"} Feb 02 08:59:12 crc kubenswrapper[4747]: I0202 08:59:12.167813 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-hrg7n" Feb 02 08:59:12 crc kubenswrapper[4747]: I0202 08:59:12.739286 4747 generic.go:334] "Generic (PLEG): container finished" podID="557f0623-5fe6-48cd-a958-88330d792ba8" containerID="6eb38d8fb2c4a8d7e20479c997401ce459bc92c121ef2017b359432cb97e68f2" exitCode=0 Feb 02 08:59:12 crc kubenswrapper[4747]: I0202 08:59:12.739408 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerDied","Data":"6eb38d8fb2c4a8d7e20479c997401ce459bc92c121ef2017b359432cb97e68f2"} Feb 02 08:59:12 crc kubenswrapper[4747]: I0202 08:59:12.997461 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.131303 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access\") pod \"19961133-de77-49b7-80bc-46a38ab3c5ce\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.131862 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir\") pod \"19961133-de77-49b7-80bc-46a38ab3c5ce\" (UID: \"19961133-de77-49b7-80bc-46a38ab3c5ce\") " Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.131952 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "19961133-de77-49b7-80bc-46a38ab3c5ce" (UID: "19961133-de77-49b7-80bc-46a38ab3c5ce"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.132165 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19961133-de77-49b7-80bc-46a38ab3c5ce-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.136278 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "19961133-de77-49b7-80bc-46a38ab3c5ce" (UID: "19961133-de77-49b7-80bc-46a38ab3c5ce"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.233860 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19961133-de77-49b7-80bc-46a38ab3c5ce-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.745769 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19961133-de77-49b7-80bc-46a38ab3c5ce","Type":"ContainerDied","Data":"3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908"} Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.745816 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e7bb1e9335db85ad719850a5d6a4489c86a088e6875c15db07f8b94b02a6908" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.745884 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.752121 4747 generic.go:334] "Generic (PLEG): container finished" podID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerID="2bf33c8d468ce2b262e859b0d2c4598eeb2b09a697c1225f071f4cff05c7c4e2" exitCode=0 Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.752200 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerDied","Data":"2bf33c8d468ce2b262e859b0d2c4598eeb2b09a697c1225f071f4cff05c7c4e2"} Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.754806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6905ab8-61cf-4269-adc3-68ebdda1bf73","Type":"ContainerStarted","Data":"3df378a28933f63e2ebccbe9f0278779cdfb3ce853ebb7b85a6fccee24617b4c"} Feb 02 08:59:13 crc kubenswrapper[4747]: I0202 08:59:13.799851 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.799829797 podStartE2EDuration="3.799829797s" podCreationTimestamp="2026-02-02 08:59:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:13.791395291 +0000 UTC m=+166.335733724" watchObservedRunningTime="2026-02-02 08:59:13.799829797 +0000 UTC m=+166.344168230" Feb 02 08:59:14 crc kubenswrapper[4747]: I0202 08:59:14.765357 4747 generic.go:334] "Generic (PLEG): container finished" podID="e6905ab8-61cf-4269-adc3-68ebdda1bf73" containerID="3df378a28933f63e2ebccbe9f0278779cdfb3ce853ebb7b85a6fccee24617b4c" exitCode=0 Feb 02 08:59:14 crc kubenswrapper[4747]: I0202 08:59:14.765403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6905ab8-61cf-4269-adc3-68ebdda1bf73","Type":"ContainerDied","Data":"3df378a28933f63e2ebccbe9f0278779cdfb3ce853ebb7b85a6fccee24617b4c"} Feb 02 08:59:15 crc kubenswrapper[4747]: I0202 08:59:15.066926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:59:15 crc 
kubenswrapper[4747]: I0202 08:59:15.085822 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/83246640-90cc-4bd6-b508-9e2ebdcda8c2-metrics-certs\") pod \"network-metrics-daemon-t5t4m\" (UID: \"83246640-90cc-4bd6-b508-9e2ebdcda8c2\") " pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:59:15 crc kubenswrapper[4747]: I0202 08:59:15.258204 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t5t4m" Feb 02 08:59:15 crc kubenswrapper[4747]: I0202 08:59:15.605403 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t5t4m"] Feb 02 08:59:15 crc kubenswrapper[4747]: W0202 08:59:15.621120 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83246640_90cc_4bd6_b508_9e2ebdcda8c2.slice/crio-6bc388850146f5b8228c3239b825db27ded53c6ce656a1f1a297af4d451f72eb WatchSource:0}: Error finding container 6bc388850146f5b8228c3239b825db27ded53c6ce656a1f1a297af4d451f72eb: Status 404 returned error can't find the container with id 6bc388850146f5b8228c3239b825db27ded53c6ce656a1f1a297af4d451f72eb Feb 02 08:59:15 crc kubenswrapper[4747]: I0202 08:59:15.778217 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" event={"ID":"83246640-90cc-4bd6-b508-9e2ebdcda8c2","Type":"ContainerStarted","Data":"6bc388850146f5b8228c3239b825db27ded53c6ce656a1f1a297af4d451f72eb"} Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.147794 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.287576 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir\") pod \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.287684 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e6905ab8-61cf-4269-adc3-68ebdda1bf73" (UID: "e6905ab8-61cf-4269-adc3-68ebdda1bf73"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.287716 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access\") pod \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\" (UID: \"e6905ab8-61cf-4269-adc3-68ebdda1bf73\") " Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.288053 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.310388 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e6905ab8-61cf-4269-adc3-68ebdda1bf73" (UID: "e6905ab8-61cf-4269-adc3-68ebdda1bf73"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.389038 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6905ab8-61cf-4269-adc3-68ebdda1bf73-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.785962 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" event={"ID":"83246640-90cc-4bd6-b508-9e2ebdcda8c2","Type":"ContainerStarted","Data":"dc384159f782e11f0ea2c21bbc584661b5cd58dd85e65fc48c3d47d2f8a342c4"} Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.788761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6905ab8-61cf-4269-adc3-68ebdda1bf73","Type":"ContainerDied","Data":"c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87"} Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.788782 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c326c7eee909ae247ca869652afd011858c59c35d9f62f8ff8487e62e56f3c87" Feb 02 08:59:16 crc kubenswrapper[4747]: I0202 08:59:16.788832 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 08:59:17 crc kubenswrapper[4747]: I0202 08:59:17.807236 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t5t4m" event={"ID":"83246640-90cc-4bd6-b508-9e2ebdcda8c2","Type":"ContainerStarted","Data":"0f81e752a2ecd59474cb4d8cab5a74d8749ea688847dfbcf3caf1238d8028895"} Feb 02 08:59:17 crc kubenswrapper[4747]: I0202 08:59:17.826372 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-t5t4m" podStartSLOduration=145.826234254 podStartE2EDuration="2m25.826234254s" podCreationTimestamp="2026-02-02 08:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:17.82180132 +0000 UTC m=+170.366139753" watchObservedRunningTime="2026-02-02 08:59:17.826234254 +0000 UTC m=+170.370572687" Feb 02 08:59:18 crc kubenswrapper[4747]: I0202 08:59:18.136369 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:59:18 crc kubenswrapper[4747]: I0202 08:59:18.136430 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m55qb" podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:59:18 crc kubenswrapper[4747]: I0202 08:59:18.138557 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-m55qb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Feb 02 08:59:18 crc kubenswrapper[4747]: I0202 08:59:18.138609 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-m55qb" 
podUID="8da1d50c-554e-4e30-8cc2-cb52865e504c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Feb 02 08:59:19 crc kubenswrapper[4747]: I0202 08:59:19.048254 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:59:19 crc kubenswrapper[4747]: I0202 08:59:19.052671 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 08:59:20 crc kubenswrapper[4747]: I0202 08:59:20.518600 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 08:59:20 crc kubenswrapper[4747]: I0202 08:59:20.518673 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.385178 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.385518 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.385759 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerName="route-controller-manager" containerID="cri-o://1954b223abd2279b745f6f5eab08cf069a10991fcde602eeddaefc1167d23b01" gracePeriod=30 Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.385901 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" containerID="cri-o://522786370ee79bc1617c61c4bba3d4cc2d4ed1e8651bab96c7d012b0ac747e78" gracePeriod=30 Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.841892 4747 generic.go:334] "Generic (PLEG): container finished" podID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerID="522786370ee79bc1617c61c4bba3d4cc2d4ed1e8651bab96c7d012b0ac747e78" exitCode=0 Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.841982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" event={"ID":"4e7a8080-f368-4138-911b-b9d6e4c84ea1","Type":"ContainerDied","Data":"522786370ee79bc1617c61c4bba3d4cc2d4ed1e8651bab96c7d012b0ac747e78"} Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.843779 4747 generic.go:334] "Generic (PLEG): container finished" podID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerID="1954b223abd2279b745f6f5eab08cf069a10991fcde602eeddaefc1167d23b01" exitCode=0 Feb 02 08:59:22 crc kubenswrapper[4747]: I0202 08:59:22.843849 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" 
event={"ID":"00c5206f-41e3-4fc3-851e-febfc74613a3","Type":"ContainerDied","Data":"1954b223abd2279b745f6f5eab08cf069a10991fcde602eeddaefc1167d23b01"} Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.863417 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.868833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" event={"ID":"4e7a8080-f368-4138-911b-b9d6e4c84ea1","Type":"ContainerDied","Data":"bd47d7f952af4d21f853785c72e729e66d8d9e9dc43c27dbb92e0706f12f41d3"} Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.868892 4747 scope.go:117] "RemoveContainer" containerID="522786370ee79bc1617c61c4bba3d4cc2d4ed1e8651bab96c7d012b0ac747e78" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.870153 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.871074 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" event={"ID":"00c5206f-41e3-4fc3-851e-febfc74613a3","Type":"ContainerDied","Data":"c9afce8839087d81921a6d5a1c92181b8a00f3a6ff2ddd105a018b4722e8d1c2"} Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.895488 4747 scope.go:117] "RemoveContainer" containerID="1954b223abd2279b745f6f5eab08cf069a10991fcde602eeddaefc1167d23b01" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900555 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:24 crc kubenswrapper[4747]: E0202 08:59:24.900768 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6905ab8-61cf-4269-adc3-68ebdda1bf73" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900780 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6905ab8-61cf-4269-adc3-68ebdda1bf73" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: E0202 08:59:24.900795 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19961133-de77-49b7-80bc-46a38ab3c5ce" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900802 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="19961133-de77-49b7-80bc-46a38ab3c5ce" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: E0202 08:59:24.900815 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerName="route-controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900822 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerName="route-controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: E0202 08:59:24.900832 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900838 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900938 4747 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" containerName="route-controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900963 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" containerName="controller-manager" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900977 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6905ab8-61cf-4269-adc3-68ebdda1bf73" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.900991 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="19961133-de77-49b7-80bc-46a38ab3c5ce" containerName="pruner" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.901391 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.922148 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926073 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9sdm\" (UniqueName: \"kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm\") pod \"00c5206f-41e3-4fc3-851e-febfc74613a3\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert\") pod \"00c5206f-41e3-4fc3-851e-febfc74613a3\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926385 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca\") pod \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926496 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thbsw\" (UniqueName: \"kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw\") pod \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926552 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert\") pod \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926613 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles\") pod \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926657 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config\") pod \"00c5206f-41e3-4fc3-851e-febfc74613a3\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " Feb 
02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926713 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config\") pod \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\" (UID: \"4e7a8080-f368-4138-911b-b9d6e4c84ea1\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.926776 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca\") pod \"00c5206f-41e3-4fc3-851e-febfc74613a3\" (UID: \"00c5206f-41e3-4fc3-851e-febfc74613a3\") " Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.929269 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca" (OuterVolumeSpecName: "client-ca") pod "00c5206f-41e3-4fc3-851e-febfc74613a3" (UID: "00c5206f-41e3-4fc3-851e-febfc74613a3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.930557 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config" (OuterVolumeSpecName: "config") pod "00c5206f-41e3-4fc3-851e-febfc74613a3" (UID: "00c5206f-41e3-4fc3-851e-febfc74613a3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.931238 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4e7a8080-f368-4138-911b-b9d6e4c84ea1" (UID: "4e7a8080-f368-4138-911b-b9d6e4c84ea1"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.932286 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca" (OuterVolumeSpecName: "client-ca") pod "4e7a8080-f368-4138-911b-b9d6e4c84ea1" (UID: "4e7a8080-f368-4138-911b-b9d6e4c84ea1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.932536 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config" (OuterVolumeSpecName: "config") pod "4e7a8080-f368-4138-911b-b9d6e4c84ea1" (UID: "4e7a8080-f368-4138-911b-b9d6e4c84ea1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.941460 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "00c5206f-41e3-4fc3-851e-febfc74613a3" (UID: "00c5206f-41e3-4fc3-851e-febfc74613a3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.941715 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4e7a8080-f368-4138-911b-b9d6e4c84ea1" (UID: "4e7a8080-f368-4138-911b-b9d6e4c84ea1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.941917 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw" (OuterVolumeSpecName: "kube-api-access-thbsw") pod "4e7a8080-f368-4138-911b-b9d6e4c84ea1" (UID: "4e7a8080-f368-4138-911b-b9d6e4c84ea1"). InnerVolumeSpecName "kube-api-access-thbsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:24 crc kubenswrapper[4747]: I0202 08:59:24.944632 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm" (OuterVolumeSpecName: "kube-api-access-k9sdm") pod "00c5206f-41e3-4fc3-851e-febfc74613a3" (UID: "00c5206f-41e3-4fc3-851e-febfc74613a3"). InnerVolumeSpecName "kube-api-access-k9sdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.028741 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.028871 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.028904 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.028939 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24ph7\" (UniqueName: \"kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.028991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " 
pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029118 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029145 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029158 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029169 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/00c5206f-41e3-4fc3-851e-febfc74613a3-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029181 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9sdm\" (UniqueName: \"kubernetes.io/projected/00c5206f-41e3-4fc3-851e-febfc74613a3-kube-api-access-k9sdm\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029193 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00c5206f-41e3-4fc3-851e-febfc74613a3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029203 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7a8080-f368-4138-911b-b9d6e4c84ea1-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029214 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thbsw\" (UniqueName: \"kubernetes.io/projected/4e7a8080-f368-4138-911b-b9d6e4c84ea1-kube-api-access-thbsw\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.029224 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7a8080-f368-4138-911b-b9d6e4c84ea1-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.130850 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.130906 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.130938 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24ph7\" (UniqueName: \"kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7\") pod 
\"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.131028 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.131072 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.132698 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.132697 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.133145 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.134434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.151211 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24ph7\" (UniqueName: \"kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7\") pod \"controller-manager-98c89bd56-vhhh5\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.222422 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.425586 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.888911 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" event={"ID":"eeff741f-a126-41f7-b138-1437e07fdf8f","Type":"ContainerStarted","Data":"d3eda70847d65498da5cc5a682f2c5f40483a76f9b4791ff4027691d817c08ff"} Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.888998 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" event={"ID":"eeff741f-a126-41f7-b138-1437e07fdf8f","Type":"ContainerStarted","Data":"8494dae79f13a8c0dfc015f9f109a721661fb8d3ae9c1d7850e97acd4c51c280"} Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.890526 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.902693 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wq8xw" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.913768 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.919244 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" podStartSLOduration=3.919228307 podStartE2EDuration="3.919228307s" podCreationTimestamp="2026-02-02 08:59:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:25.916473717 +0000 UTC m=+178.460812140" watchObservedRunningTime="2026-02-02 08:59:25.919228307 +0000 UTC m=+178.463566740" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.943536 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.956810 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.962550 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wq8xw"] Feb 02 08:59:25 crc kubenswrapper[4747]: I0202 08:59:25.998751 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:59:26 crc kubenswrapper[4747]: I0202 08:59:26.002227 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-9brdq"] Feb 02 08:59:26 crc kubenswrapper[4747]: I0202 08:59:26.348693 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00c5206f-41e3-4fc3-851e-febfc74613a3" path="/var/lib/kubelet/pods/00c5206f-41e3-4fc3-851e-febfc74613a3/volumes" Feb 02 08:59:26 crc kubenswrapper[4747]: I0202 08:59:26.350296 4747 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="4e7a8080-f368-4138-911b-b9d6e4c84ea1" path="/var/lib/kubelet/pods/4e7a8080-f368-4138-911b-b9d6e4c84ea1/volumes" Feb 02 08:59:26 crc kubenswrapper[4747]: I0202 08:59:26.833501 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.878020 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.879048 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.884125 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.884464 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.888112 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.889281 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.889377 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.889569 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.889710 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.968203 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.968463 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.968590 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:27 crc kubenswrapper[4747]: I0202 08:59:27.968779 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tpmk\" (UniqueName: \"kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.070196 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.070234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.070260 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.070319 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tpmk\" (UniqueName: \"kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.071810 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.072547 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.087351 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.087593 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tpmk\" 
(UniqueName: \"kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk\") pod \"route-controller-manager-7b9c54866d-6fqzz\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.144302 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-m55qb" Feb 02 08:59:28 crc kubenswrapper[4747]: I0202 08:59:28.204636 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:36 crc kubenswrapper[4747]: I0202 08:59:36.467446 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 08:59:40 crc kubenswrapper[4747]: I0202 08:59:40.398098 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z2hbw" Feb 02 08:59:41 crc kubenswrapper[4747]: E0202 08:59:41.481909 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 08:59:41 crc kubenswrapper[4747]: E0202 08:59:41.482485 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vxb2c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5z2cn_openshift-marketplace(52602e18-9a62-4ee3-bfa3-530eb601caa9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:41 crc kubenswrapper[4747]: E0202 08:59:41.483886 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5z2cn" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" Feb 02 08:59:42 crc kubenswrapper[4747]: I0202 08:59:42.349382 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:42 crc kubenswrapper[4747]: I0202 08:59:42.350272 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerName="controller-manager" containerID="cri-o://d3eda70847d65498da5cc5a682f2c5f40483a76f9b4791ff4027691d817c08ff" gracePeriod=30 Feb 02 08:59:42 crc kubenswrapper[4747]: I0202 08:59:42.434990 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:43 crc kubenswrapper[4747]: I0202 08:59:43.022383 4747 generic.go:334] "Generic (PLEG): container finished" podID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerID="d3eda70847d65498da5cc5a682f2c5f40483a76f9b4791ff4027691d817c08ff" exitCode=0 Feb 02 08:59:43 crc kubenswrapper[4747]: I0202 08:59:43.022471 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" event={"ID":"eeff741f-a126-41f7-b138-1437e07fdf8f","Type":"ContainerDied","Data":"d3eda70847d65498da5cc5a682f2c5f40483a76f9b4791ff4027691d817c08ff"} Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.689995 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.690226 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vz9sn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
certified-operators-wspvt_openshift-marketplace(7935ede6-8dc0-421f-9296-fbd017061975): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.691753 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wspvt" podUID="7935ede6-8dc0-421f-9296-fbd017061975" Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.974299 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.974462 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v569z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dpz98_openshift-marketplace(87ba3296-51e6-4641-acbc-e24b60ffe91c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:43 crc kubenswrapper[4747]: E0202 08:59:43.976411 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dpz98" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" Feb 02 08:59:44 crc kubenswrapper[4747]: E0202 08:59:44.142231 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 08:59:44 crc kubenswrapper[4747]: E0202 08:59:44.142424 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cq2nn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-p8jt9_openshift-marketplace(30faf8f7-e02e-4fbd-b23b-6ad3c9e71989): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:44 crc kubenswrapper[4747]: E0202 08:59:44.143576 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-p8jt9" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.224453 4747 patch_prober.go:28] interesting pod/controller-manager-98c89bd56-vhhh5 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": dial tcp 10.217.0.54:8443: connect: connection refused" start-of-body= Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.224831 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": dial tcp 10.217.0.54:8443: connect: connection refused" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.298005 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dpz98" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" Feb 02 08:59:45 
crc kubenswrapper[4747]: E0202 08:59:45.298103 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-p8jt9" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.298161 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wspvt" podUID="7935ede6-8dc0-421f-9296-fbd017061975" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.384684 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.384832 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zb5lh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pzgvn_openshift-marketplace(2e8d277b-4e88-425b-a33f-3d657972fd59): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.386315 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pzgvn" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.415071 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying 
system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.415255 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j4kzn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-bmlc4_openshift-marketplace(0cdcfe28-8ae4-4938-8d64-d8255b92cf90): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.416528 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-bmlc4" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.706042 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.734750 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 08:59:45 crc kubenswrapper[4747]: E0202 08:59:45.735008 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerName="controller-manager" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.735024 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerName="controller-manager" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.735139 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" containerName="controller-manager" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.735564 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.740813 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config\") pod \"eeff741f-a126-41f7-b138-1437e07fdf8f\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.740868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert\") pod \"eeff741f-a126-41f7-b138-1437e07fdf8f\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.740976 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles\") pod \"eeff741f-a126-41f7-b138-1437e07fdf8f\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.741021 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24ph7\" (UniqueName: \"kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7\") pod \"eeff741f-a126-41f7-b138-1437e07fdf8f\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.741065 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca\") pod \"eeff741f-a126-41f7-b138-1437e07fdf8f\" (UID: \"eeff741f-a126-41f7-b138-1437e07fdf8f\") " Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.741732 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config" (OuterVolumeSpecName: "config") pod "eeff741f-a126-41f7-b138-1437e07fdf8f" (UID: "eeff741f-a126-41f7-b138-1437e07fdf8f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.742159 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca" (OuterVolumeSpecName: "client-ca") pod "eeff741f-a126-41f7-b138-1437e07fdf8f" (UID: "eeff741f-a126-41f7-b138-1437e07fdf8f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.742311 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "eeff741f-a126-41f7-b138-1437e07fdf8f" (UID: "eeff741f-a126-41f7-b138-1437e07fdf8f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.752823 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.754108 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "eeff741f-a126-41f7-b138-1437e07fdf8f" (UID: "eeff741f-a126-41f7-b138-1437e07fdf8f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.754246 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7" (OuterVolumeSpecName: "kube-api-access-24ph7") pod "eeff741f-a126-41f7-b138-1437e07fdf8f" (UID: "eeff741f-a126-41f7-b138-1437e07fdf8f"). InnerVolumeSpecName "kube-api-access-24ph7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.812016 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:45 crc kubenswrapper[4747]: W0202 08:59:45.819392 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8113a24_4c89_4398_9d39_268cd40930ae.slice/crio-598b79fefe5721336a4ea098fbafbef24ee8291a10b750a2c06e2c48b6deb813 WatchSource:0}: Error finding container 598b79fefe5721336a4ea098fbafbef24ee8291a10b750a2c06e2c48b6deb813: Status 404 returned error can't find the container with id 598b79fefe5721336a4ea098fbafbef24ee8291a10b750a2c06e2c48b6deb813 Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842518 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842552 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842601 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrwcx\" (UniqueName: \"kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842676 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842690 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24ph7\" (UniqueName: \"kubernetes.io/projected/eeff741f-a126-41f7-b138-1437e07fdf8f-kube-api-access-24ph7\") on node \"crc\" DevicePath \"\"" Feb 02 
08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842707 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842719 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeff741f-a126-41f7-b138-1437e07fdf8f-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.842729 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eeff741f-a126-41f7-b138-1437e07fdf8f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.944428 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.944482 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.944512 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.944536 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrwcx\" (UniqueName: \"kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.944608 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.945881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.946180 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca\") pod 
\"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.948875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.950885 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:45 crc kubenswrapper[4747]: I0202 08:59:45.969231 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrwcx\" (UniqueName: \"kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx\") pod \"controller-manager-59967d9484-q65t7\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.039735 4747 generic.go:334] "Generic (PLEG): container finished" podID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerID="a2c4e5b1d70b28500c863b0a4179bcabf34317cf73a8d5b90465f8f0dff38d4b" exitCode=0 Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.039818 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerDied","Data":"a2c4e5b1d70b28500c863b0a4179bcabf34317cf73a8d5b90465f8f0dff38d4b"} Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.042676 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.042678 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-98c89bd56-vhhh5" event={"ID":"eeff741f-a126-41f7-b138-1437e07fdf8f","Type":"ContainerDied","Data":"8494dae79f13a8c0dfc015f9f109a721661fb8d3ae9c1d7850e97acd4c51c280"} Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.042983 4747 scope.go:117] "RemoveContainer" containerID="d3eda70847d65498da5cc5a682f2c5f40483a76f9b4791ff4027691d817c08ff" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.046440 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" event={"ID":"c8113a24-4c89-4398-9d39-268cd40930ae","Type":"ContainerStarted","Data":"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589"} Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.046568 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" event={"ID":"c8113a24-4c89-4398-9d39-268cd40930ae","Type":"ContainerStarted","Data":"598b79fefe5721336a4ea098fbafbef24ee8291a10b750a2c06e2c48b6deb813"} Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.046656 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" containerName="route-controller-manager" containerID="cri-o://4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589" gracePeriod=30 Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.046971 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.058157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerStarted","Data":"fda4247cafdc9448d1856224d2c914875d950c718db80f06ea3db2730ac3ab1d"} Feb 02 08:59:46 crc kubenswrapper[4747]: E0202 08:59:46.073278 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pzgvn" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" Feb 02 08:59:46 crc kubenswrapper[4747]: E0202 08:59:46.073438 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-bmlc4" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.074823 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.080744 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" podStartSLOduration=24.080721457 podStartE2EDuration="24.080721457s" podCreationTimestamp="2026-02-02 08:59:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:46.08004134 +0000 UTC m=+198.624379793" watchObservedRunningTime="2026-02-02 08:59:46.080721457 +0000 UTC m=+198.625059900" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.089182 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.095365 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-98c89bd56-vhhh5"] Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.258816 4747 patch_prober.go:28] interesting pod/route-controller-manager-7b9c54866d-6fqzz container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:35130->10.217.0.55:8443: read: connection reset by peer" start-of-body= Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.259150 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:35130->10.217.0.55:8443: read: connection reset by peer" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.348928 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeff741f-a126-41f7-b138-1437e07fdf8f" path="/var/lib/kubelet/pods/eeff741f-a126-41f7-b138-1437e07fdf8f/volumes" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.484118 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.491566 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-7b9c54866d-6fqzz_c8113a24-4c89-4398-9d39-268cd40930ae/route-controller-manager/0.log" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.491627 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.554690 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tpmk\" (UniqueName: \"kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk\") pod \"c8113a24-4c89-4398-9d39-268cd40930ae\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.554797 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config\") pod \"c8113a24-4c89-4398-9d39-268cd40930ae\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.554826 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca\") pod \"c8113a24-4c89-4398-9d39-268cd40930ae\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.554854 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert\") pod \"c8113a24-4c89-4398-9d39-268cd40930ae\" (UID: \"c8113a24-4c89-4398-9d39-268cd40930ae\") " Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.555657 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config" (OuterVolumeSpecName: "config") pod "c8113a24-4c89-4398-9d39-268cd40930ae" (UID: "c8113a24-4c89-4398-9d39-268cd40930ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.556074 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca" (OuterVolumeSpecName: "client-ca") pod "c8113a24-4c89-4398-9d39-268cd40930ae" (UID: "c8113a24-4c89-4398-9d39-268cd40930ae"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.559890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk" (OuterVolumeSpecName: "kube-api-access-7tpmk") pod "c8113a24-4c89-4398-9d39-268cd40930ae" (UID: "c8113a24-4c89-4398-9d39-268cd40930ae"). InnerVolumeSpecName "kube-api-access-7tpmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.559890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c8113a24-4c89-4398-9d39-268cd40930ae" (UID: "c8113a24-4c89-4398-9d39-268cd40930ae"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.655962 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tpmk\" (UniqueName: \"kubernetes.io/projected/c8113a24-4c89-4398-9d39-268cd40930ae-kube-api-access-7tpmk\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.655991 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-config\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.656000 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c8113a24-4c89-4398-9d39-268cd40930ae-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:46 crc kubenswrapper[4747]: I0202 08:59:46.656008 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c8113a24-4c89-4398-9d39-268cd40930ae-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.065422 4747 generic.go:334] "Generic (PLEG): container finished" podID="557f0623-5fe6-48cd-a958-88330d792ba8" containerID="fda4247cafdc9448d1856224d2c914875d950c718db80f06ea3db2730ac3ab1d" exitCode=0 Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.065482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerDied","Data":"fda4247cafdc9448d1856224d2c914875d950c718db80f06ea3db2730ac3ab1d"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.071262 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerStarted","Data":"fb7a152e4ede4158c6be688a0a25af272d06075870a68e333da893cb9e8b1984"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.072805 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" event={"ID":"b3f44377-91af-4adb-a377-12e5ede07d0c","Type":"ContainerStarted","Data":"1e2b8b2cc7ca0853468047cdfdea184814f19b621d226849294abef3de03ab8b"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.072857 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" event={"ID":"b3f44377-91af-4adb-a377-12e5ede07d0c","Type":"ContainerStarted","Data":"a7fef6443112b1ba72d74d4e4d1c9cdf4b6394c551f01c626a2dbc70b44162af"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.073891 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076576 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-7b9c54866d-6fqzz_c8113a24-4c89-4398-9d39-268cd40930ae/route-controller-manager/0.log" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076632 4747 generic.go:334] "Generic (PLEG): container finished" podID="c8113a24-4c89-4398-9d39-268cd40930ae" containerID="4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589" exitCode=255 Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076668 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" event={"ID":"c8113a24-4c89-4398-9d39-268cd40930ae","Type":"ContainerDied","Data":"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076680 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz" event={"ID":"c8113a24-4c89-4398-9d39-268cd40930ae","Type":"ContainerDied","Data":"598b79fefe5721336a4ea098fbafbef24ee8291a10b750a2c06e2c48b6deb813"} Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.076727 4747 scope.go:117] "RemoveContainer" containerID="4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.096863 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.104999 4747 scope.go:117] "RemoveContainer" containerID="4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589" Feb 02 08:59:47 crc kubenswrapper[4747]: E0202 08:59:47.105361 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589\": container with ID starting with 4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589 not found: ID does not exist" containerID="4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.105388 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589"} err="failed to get container status \"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589\": rpc error: code = NotFound desc = could not find container \"4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589\": container with ID starting with 4778e5cfcc9c3a27620f6bb1d07852067416b7beda27b5d6bc37ae8ce6cb0589 not found: ID does not exist" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.113394 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" podStartSLOduration=5.113379227 podStartE2EDuration="5.113379227s" podCreationTimestamp="2026-02-02 08:59:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:47.110438181 +0000 UTC m=+199.654776614" watchObservedRunningTime="2026-02-02 08:59:47.113379227 +0000 UTC m=+199.657717660" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.140628 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9z22h" podStartSLOduration=4.356525568 podStartE2EDuration="38.140605724s" podCreationTimestamp="2026-02-02 08:59:09 +0000 UTC" firstStartedPulling="2026-02-02 08:59:12.742383444 +0000 UTC m=+165.286721877" lastFinishedPulling="2026-02-02 08:59:46.5264636 +0000 UTC m=+199.070802033" observedRunningTime="2026-02-02 08:59:47.131234284 +0000 UTC 
m=+199.675572717" watchObservedRunningTime="2026-02-02 08:59:47.140605724 +0000 UTC m=+199.684944157" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.142175 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.144772 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b9c54866d-6fqzz"] Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.668846 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 08:59:47 crc kubenswrapper[4747]: E0202 08:59:47.669300 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" containerName="route-controller-manager" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.669322 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" containerName="route-controller-manager" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.669502 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" containerName="route-controller-manager" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.670034 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.672386 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.672515 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.672532 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.771314 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.771721 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.872887 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.872994 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: 
\"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.873214 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.891180 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.892053 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.895814 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.896356 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.896600 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.896788 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.897400 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.897534 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.910836 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.914988 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.973681 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.974018 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:47 crc 
kubenswrapper[4747]: I0202 08:59:47.974282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pszg2\" (UniqueName: \"kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.974416 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:47 crc kubenswrapper[4747]: I0202 08:59:47.987612 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.095185 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.095242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.095411 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.095455 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pszg2\" (UniqueName: \"kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.096643 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.097084 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: 
\"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.108728 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.112904 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerStarted","Data":"d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b"} Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.116771 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pszg2\" (UniqueName: \"kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2\") pod \"route-controller-manager-67885fc5d8-c2fdk\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.131500 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fpzd5" podStartSLOduration=4.35220324 podStartE2EDuration="38.131477593s" podCreationTimestamp="2026-02-02 08:59:10 +0000 UTC" firstStartedPulling="2026-02-02 08:59:13.756513898 +0000 UTC m=+166.300852341" lastFinishedPulling="2026-02-02 08:59:47.535788261 +0000 UTC m=+200.080126694" observedRunningTime="2026-02-02 08:59:48.130162359 +0000 UTC m=+200.674500792" watchObservedRunningTime="2026-02-02 08:59:48.131477593 +0000 UTC m=+200.675816026" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.204898 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.210241 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.392362 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8113a24-4c89-4398-9d39-268cd40930ae" path="/var/lib/kubelet/pods/c8113a24-4c89-4398-9d39-268cd40930ae/volumes" Feb 02 08:59:48 crc kubenswrapper[4747]: I0202 08:59:48.646397 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.118456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" event={"ID":"03608488-f7a3-44c6-be14-3219075c7e8c","Type":"ContainerStarted","Data":"213793d2afd3a5cd3e79a337e009acaa6a6cf45c96924ce6fa67e543aace7b8b"} Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.118740 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.118752 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" event={"ID":"03608488-f7a3-44c6-be14-3219075c7e8c","Type":"ContainerStarted","Data":"11e134a6ab0ae118f928e8073c915b2e46593aa003c94739b4627b8db896e8a6"} Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.121621 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"823c252b-f71f-4a30-82d1-9343d5eee2de","Type":"ContainerStarted","Data":"cb537d551e21c4ea2b7952e02f9eeb7c03a157eb4370429b1bc875fcb0e76ab9"} Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.121675 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"823c252b-f71f-4a30-82d1-9343d5eee2de","Type":"ContainerStarted","Data":"8983d1cf07e1b384e1eee57ed0746b65bca31fdd71c5c702b74cf621527d82d0"} Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.140066 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" podStartSLOduration=7.140045435 podStartE2EDuration="7.140045435s" podCreationTimestamp="2026-02-02 08:59:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:49.13632789 +0000 UTC m=+201.680666323" watchObservedRunningTime="2026-02-02 08:59:49.140045435 +0000 UTC m=+201.684383878" Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.153150 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.15313292 podStartE2EDuration="2.15313292s" podCreationTimestamp="2026-02-02 08:59:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:49.150327618 +0000 UTC m=+201.694666081" watchObservedRunningTime="2026-02-02 08:59:49.15313292 +0000 UTC m=+201.697471343" Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.177842 4747 patch_prober.go:28] interesting pod/route-controller-manager-67885fc5d8-c2fdk container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe 
status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 02 08:59:49 crc kubenswrapper[4747]: [+]log ok Feb 02 08:59:49 crc kubenswrapper[4747]: [+]poststarthook/max-in-flight-filter ok Feb 02 08:59:49 crc kubenswrapper[4747]: [-]poststarthook/storage-object-count-tracker-hook failed: reason withheld Feb 02 08:59:49 crc kubenswrapper[4747]: healthz check failed Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.177928 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" containerName="route-controller-manager" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.734989 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:49 crc kubenswrapper[4747]: I0202 08:59:49.735037 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.129231 4747 generic.go:334] "Generic (PLEG): container finished" podID="823c252b-f71f-4a30-82d1-9343d5eee2de" containerID="cb537d551e21c4ea2b7952e02f9eeb7c03a157eb4370429b1bc875fcb0e76ab9" exitCode=0 Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.129307 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"823c252b-f71f-4a30-82d1-9343d5eee2de","Type":"ContainerDied","Data":"cb537d551e21c4ea2b7952e02f9eeb7c03a157eb4370429b1bc875fcb0e76ab9"} Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.135608 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.209854 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.488171 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.488225 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.518925 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 08:59:50 crc kubenswrapper[4747]: I0202 08:59:50.519390 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.484109 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.528773 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fpzd5" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="registry-server" probeResult="failure" output=< Feb 02 08:59:51 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 08:59:51 crc kubenswrapper[4747]: > Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.543403 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access\") pod \"823c252b-f71f-4a30-82d1-9343d5eee2de\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.543515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir\") pod \"823c252b-f71f-4a30-82d1-9343d5eee2de\" (UID: \"823c252b-f71f-4a30-82d1-9343d5eee2de\") " Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.543854 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "823c252b-f71f-4a30-82d1-9343d5eee2de" (UID: "823c252b-f71f-4a30-82d1-9343d5eee2de"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.549583 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "823c252b-f71f-4a30-82d1-9343d5eee2de" (UID: "823c252b-f71f-4a30-82d1-9343d5eee2de"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.645161 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/823c252b-f71f-4a30-82d1-9343d5eee2de-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:51 crc kubenswrapper[4747]: I0202 08:59:51.645216 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/823c252b-f71f-4a30-82d1-9343d5eee2de-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.140086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"823c252b-f71f-4a30-82d1-9343d5eee2de","Type":"ContainerDied","Data":"8983d1cf07e1b384e1eee57ed0746b65bca31fdd71c5c702b74cf621527d82d0"} Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.140145 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8983d1cf07e1b384e1eee57ed0746b65bca31fdd71c5c702b74cf621527d82d0" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.140149 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.255541 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 08:59:52 crc kubenswrapper[4747]: E0202 08:59:52.255775 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c252b-f71f-4a30-82d1-9343d5eee2de" containerName="pruner" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.255790 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c252b-f71f-4a30-82d1-9343d5eee2de" containerName="pruner" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.255911 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c252b-f71f-4a30-82d1-9343d5eee2de" containerName="pruner" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.256389 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.258589 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.258662 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.272720 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.356099 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.356295 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.356374 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.458197 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.458305 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.458337 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.458347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.458465 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.476093 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access\") pod \"installer-9-crc\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:52 crc kubenswrapper[4747]: I0202 08:59:52.583606 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 08:59:53 crc kubenswrapper[4747]: I0202 08:59:52.999973 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 08:59:53 crc kubenswrapper[4747]: I0202 08:59:53.152012 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf","Type":"ContainerStarted","Data":"e725babf3c8c8474916f869ea3999610900024fde76e6bf98238803250e103a4"} Feb 02 08:59:54 crc kubenswrapper[4747]: I0202 08:59:54.160626 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf","Type":"ContainerStarted","Data":"9b150e209bf9fdb5dcb9f8fc7e0ac7c65352e87d52251eb429ace8f751634e69"} Feb 02 08:59:54 crc kubenswrapper[4747]: I0202 08:59:54.185135 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.185101202 podStartE2EDuration="2.185101202s" podCreationTimestamp="2026-02-02 08:59:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 08:59:54.177289262 +0000 UTC m=+206.721627695" watchObservedRunningTime="2026-02-02 08:59:54.185101202 +0000 UTC m=+206.729439675" Feb 02 08:59:56 crc kubenswrapper[4747]: I0202 08:59:56.174926 4747 generic.go:334] "Generic (PLEG): container finished" podID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerID="91d440e894facd5f0c8be86275bbe0ffd30ca23aa07c346c2f02dc2de19b6ea3" exitCode=0 Feb 02 08:59:56 crc kubenswrapper[4747]: I0202 08:59:56.174990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerDied","Data":"91d440e894facd5f0c8be86275bbe0ffd30ca23aa07c346c2f02dc2de19b6ea3"} Feb 02 08:59:59 crc kubenswrapper[4747]: 
I0202 08:59:59.585050 4747 generic.go:334] "Generic (PLEG): container finished" podID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerID="fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be" exitCode=0 Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.585132 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerDied","Data":"fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be"} Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.587927 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerStarted","Data":"f213576c634d6a84ded43ecaf335c00b7ea2aedfe1a0a115a1fc29dccff98f5b"} Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.591410 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerStarted","Data":"04e7ea152af00cbbd10d71b1824c6bbfa83760a6b5f917a02c231a764ebd8d43"} Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.593664 4747 generic.go:334] "Generic (PLEG): container finished" podID="7935ede6-8dc0-421f-9296-fbd017061975" containerID="9b447d01cf71e8cd37c4d8c1f6455d34dc1ff906853f2538cdca135df95f4140" exitCode=0 Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.593697 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerDied","Data":"9b447d01cf71e8cd37c4d8c1f6455d34dc1ff906853f2538cdca135df95f4140"} Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.626495 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5z2cn" podStartSLOduration=5.879110508 podStartE2EDuration="51.626472417s" podCreationTimestamp="2026-02-02 08:59:08 +0000 UTC" firstStartedPulling="2026-02-02 08:59:12.743616446 +0000 UTC m=+165.287954879" lastFinishedPulling="2026-02-02 08:59:58.490978355 +0000 UTC m=+211.035316788" observedRunningTime="2026-02-02 08:59:59.623057859 +0000 UTC m=+212.167396312" watchObservedRunningTime="2026-02-02 08:59:59.626472417 +0000 UTC m=+212.170810860" Feb 02 08:59:59 crc kubenswrapper[4747]: I0202 08:59:59.776243 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.136489 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd"] Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.137294 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.138981 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.139650 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.145660 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd"] Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.254909 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2nk5\" (UniqueName: \"kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.254998 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.255024 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.356128 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2nk5\" (UniqueName: \"kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.356685 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.356709 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.357835 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume\") pod 
\"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.362051 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.377812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2nk5\" (UniqueName: \"kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5\") pod \"collect-profiles-29500380-2wfhd\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.452387 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.547354 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.595548 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.629195 4747 generic.go:334] "Generic (PLEG): container finished" podID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerID="f213576c634d6a84ded43ecaf335c00b7ea2aedfe1a0a115a1fc29dccff98f5b" exitCode=0 Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.630416 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerDied","Data":"f213576c634d6a84ded43ecaf335c00b7ea2aedfe1a0a115a1fc29dccff98f5b"} Feb 02 09:00:00 crc kubenswrapper[4747]: I0202 09:00:00.855865 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd"] Feb 02 09:00:00 crc kubenswrapper[4747]: W0202 09:00:00.867503 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod939ae80d_cdd6_4306_b7fd_7b222f530288.slice/crio-6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5 WatchSource:0}: Error finding container 6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5: Status 404 returned error can't find the container with id 6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5 Feb 02 09:00:01 crc kubenswrapper[4747]: I0202 09:00:01.637000 4747 generic.go:334] "Generic (PLEG): container finished" podID="939ae80d-cdd6-4306-b7fd-7b222f530288" containerID="367d53fc44cfbd809577fe19febfdd2715d049919efa8816d6dc8abed2d81b28" exitCode=0 Feb 02 09:00:01 crc kubenswrapper[4747]: I0202 09:00:01.637046 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" event={"ID":"939ae80d-cdd6-4306-b7fd-7b222f530288","Type":"ContainerDied","Data":"367d53fc44cfbd809577fe19febfdd2715d049919efa8816d6dc8abed2d81b28"} Feb 02 09:00:01 crc kubenswrapper[4747]: I0202 
09:00:01.638140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" event={"ID":"939ae80d-cdd6-4306-b7fd-7b222f530288","Type":"ContainerStarted","Data":"6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5"} Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.007325 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.007591 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9z22h" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="registry-server" containerID="cri-o://fb7a152e4ede4158c6be688a0a25af272d06075870a68e333da893cb9e8b1984" gracePeriod=2 Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.336428 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.336677 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" podUID="b3f44377-91af-4adb-a377-12e5ede07d0c" containerName="controller-manager" containerID="cri-o://1e2b8b2cc7ca0853468047cdfdea184814f19b621d226849294abef3de03ab8b" gracePeriod=30 Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.359833 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.360083 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" containerName="route-controller-manager" containerID="cri-o://213793d2afd3a5cd3e79a337e009acaa6a6cf45c96924ce6fa67e543aace7b8b" gracePeriod=30 Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.649165 4747 generic.go:334] "Generic (PLEG): container finished" podID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerID="fb7a152e4ede4158c6be688a0a25af272d06075870a68e333da893cb9e8b1984" exitCode=0 Feb 02 09:00:02 crc kubenswrapper[4747]: I0202 09:00:02.649263 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerDied","Data":"fb7a152e4ede4158c6be688a0a25af272d06075870a68e333da893cb9e8b1984"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.023948 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.191536 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2nk5\" (UniqueName: \"kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5\") pod \"939ae80d-cdd6-4306-b7fd-7b222f530288\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.191621 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume\") pod \"939ae80d-cdd6-4306-b7fd-7b222f530288\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.191652 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume\") pod \"939ae80d-cdd6-4306-b7fd-7b222f530288\" (UID: \"939ae80d-cdd6-4306-b7fd-7b222f530288\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.192904 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume" (OuterVolumeSpecName: "config-volume") pod "939ae80d-cdd6-4306-b7fd-7b222f530288" (UID: "939ae80d-cdd6-4306-b7fd-7b222f530288"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.206049 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "939ae80d-cdd6-4306-b7fd-7b222f530288" (UID: "939ae80d-cdd6-4306-b7fd-7b222f530288"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.206183 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5" (OuterVolumeSpecName: "kube-api-access-k2nk5") pod "939ae80d-cdd6-4306-b7fd-7b222f530288" (UID: "939ae80d-cdd6-4306-b7fd-7b222f530288"). InnerVolumeSpecName "kube-api-access-k2nk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.295053 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/939ae80d-cdd6-4306-b7fd-7b222f530288-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.295086 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/939ae80d-cdd6-4306-b7fd-7b222f530288-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.295102 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2nk5\" (UniqueName: \"kubernetes.io/projected/939ae80d-cdd6-4306-b7fd-7b222f530288-kube-api-access-k2nk5\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.520021 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.659055 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9z22h" event={"ID":"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b","Type":"ContainerDied","Data":"1d9952843a957fa43eaa916e2a1dc5bab2c1cdd954c9d851c403b1177ec9c0d7"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.659118 4747 scope.go:117] "RemoveContainer" containerID="fb7a152e4ede4158c6be688a0a25af272d06075870a68e333da893cb9e8b1984" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.659261 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9z22h" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.661893 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3f44377-91af-4adb-a377-12e5ede07d0c" containerID="1e2b8b2cc7ca0853468047cdfdea184814f19b621d226849294abef3de03ab8b" exitCode=0 Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.662114 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" event={"ID":"b3f44377-91af-4adb-a377-12e5ede07d0c","Type":"ContainerDied","Data":"1e2b8b2cc7ca0853468047cdfdea184814f19b621d226849294abef3de03ab8b"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.664874 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerStarted","Data":"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.676745 4747 generic.go:334] "Generic (PLEG): container finished" podID="03608488-f7a3-44c6-be14-3219075c7e8c" containerID="213793d2afd3a5cd3e79a337e009acaa6a6cf45c96924ce6fa67e543aace7b8b" exitCode=0 Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.676844 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" event={"ID":"03608488-f7a3-44c6-be14-3219075c7e8c","Type":"ContainerDied","Data":"213793d2afd3a5cd3e79a337e009acaa6a6cf45c96924ce6fa67e543aace7b8b"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.678575 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" event={"ID":"939ae80d-cdd6-4306-b7fd-7b222f530288","Type":"ContainerDied","Data":"6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5"} Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.678632 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dda893512b5fa576d5659480f4ac2b1482f2707d010a996469186cecf0dccb5" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.678702 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.683993 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pzgvn" podStartSLOduration=2.897714103 podStartE2EDuration="56.68397391s" podCreationTimestamp="2026-02-02 08:59:07 +0000 UTC" firstStartedPulling="2026-02-02 08:59:08.695300888 +0000 UTC m=+161.239639321" lastFinishedPulling="2026-02-02 09:00:02.481560695 +0000 UTC m=+215.025899128" observedRunningTime="2026-02-02 09:00:03.681545638 +0000 UTC m=+216.225884081" watchObservedRunningTime="2026-02-02 09:00:03.68397391 +0000 UTC m=+216.228312343" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.706410 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities\") pod \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.706478 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlcvn\" (UniqueName: \"kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn\") pod \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.707468 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content\") pod \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\" (UID: \"d3be7cd8-dbbb-4866-9f01-5e8aa31c695b\") " Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.707535 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities" (OuterVolumeSpecName: "utilities") pod "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" (UID: "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.707762 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.718339 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn" (OuterVolumeSpecName: "kube-api-access-tlcvn") pod "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" (UID: "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b"). InnerVolumeSpecName "kube-api-access-tlcvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.731748 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" (UID: "d3be7cd8-dbbb-4866-9f01-5e8aa31c695b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.808561 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.808594 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlcvn\" (UniqueName: \"kubernetes.io/projected/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b-kube-api-access-tlcvn\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.986296 4747 scope.go:117] "RemoveContainer" containerID="a2c4e5b1d70b28500c863b0a4179bcabf34317cf73a8d5b90465f8f0dff38d4b" Feb 02 09:00:03 crc kubenswrapper[4747]: I0202 09:00:03.995139 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.000805 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9z22h"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.021871 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.052099 4747 scope.go:117] "RemoveContainer" containerID="9c8f68331ee732c6da8940865a683a2402d95c9a52097c2a8e819eced4a64fba" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.130613 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.213364 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca\") pod \"b3f44377-91af-4adb-a377-12e5ede07d0c\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.213449 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles\") pod \"b3f44377-91af-4adb-a377-12e5ede07d0c\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.213483 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrwcx\" (UniqueName: \"kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx\") pod \"b3f44377-91af-4adb-a377-12e5ede07d0c\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.213509 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert\") pod \"b3f44377-91af-4adb-a377-12e5ede07d0c\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.213558 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config\") pod \"b3f44377-91af-4adb-a377-12e5ede07d0c\" (UID: \"b3f44377-91af-4adb-a377-12e5ede07d0c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: 
I0202 09:00:04.214221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b3f44377-91af-4adb-a377-12e5ede07d0c" (UID: "b3f44377-91af-4adb-a377-12e5ede07d0c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.214276 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca" (OuterVolumeSpecName: "client-ca") pod "b3f44377-91af-4adb-a377-12e5ede07d0c" (UID: "b3f44377-91af-4adb-a377-12e5ede07d0c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.214290 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config" (OuterVolumeSpecName: "config") pod "b3f44377-91af-4adb-a377-12e5ede07d0c" (UID: "b3f44377-91af-4adb-a377-12e5ede07d0c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.218223 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx" (OuterVolumeSpecName: "kube-api-access-hrwcx") pod "b3f44377-91af-4adb-a377-12e5ede07d0c" (UID: "b3f44377-91af-4adb-a377-12e5ede07d0c"). InnerVolumeSpecName "kube-api-access-hrwcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.219502 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b3f44377-91af-4adb-a377-12e5ede07d0c" (UID: "b3f44377-91af-4adb-a377-12e5ede07d0c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.314685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pszg2\" (UniqueName: \"kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2\") pod \"03608488-f7a3-44c6-be14-3219075c7e8c\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.314851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert\") pod \"03608488-f7a3-44c6-be14-3219075c7e8c\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.314962 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config\") pod \"03608488-f7a3-44c6-be14-3219075c7e8c\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315001 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca\") pod \"03608488-f7a3-44c6-be14-3219075c7e8c\" (UID: \"03608488-f7a3-44c6-be14-3219075c7e8c\") " Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315293 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315318 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrwcx\" (UniqueName: \"kubernetes.io/projected/b3f44377-91af-4adb-a377-12e5ede07d0c-kube-api-access-hrwcx\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315340 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f44377-91af-4adb-a377-12e5ede07d0c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315356 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315372 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3f44377-91af-4adb-a377-12e5ede07d0c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315656 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca" (OuterVolumeSpecName: "client-ca") pod "03608488-f7a3-44c6-be14-3219075c7e8c" (UID: "03608488-f7a3-44c6-be14-3219075c7e8c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.315753 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config" (OuterVolumeSpecName: "config") pod "03608488-f7a3-44c6-be14-3219075c7e8c" (UID: "03608488-f7a3-44c6-be14-3219075c7e8c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.319364 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2" (OuterVolumeSpecName: "kube-api-access-pszg2") pod "03608488-f7a3-44c6-be14-3219075c7e8c" (UID: "03608488-f7a3-44c6-be14-3219075c7e8c"). InnerVolumeSpecName "kube-api-access-pszg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.322510 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "03608488-f7a3-44c6-be14-3219075c7e8c" (UID: "03608488-f7a3-44c6-be14-3219075c7e8c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.360376 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" path="/var/lib/kubelet/pods/d3be7cd8-dbbb-4866-9f01-5e8aa31c695b/volumes" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.416746 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03608488-f7a3-44c6-be14-3219075c7e8c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.417284 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.417297 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03608488-f7a3-44c6-be14-3219075c7e8c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.417310 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pszg2\" (UniqueName: \"kubernetes.io/projected/03608488-f7a3-44c6-be14-3219075c7e8c-kube-api-access-pszg2\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.686367 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerStarted","Data":"770def2af9fc346e033e3b5664c7dcb87cfe372ae1f3439bb5d233e1fdd891d7"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.691313 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerStarted","Data":"64e53759c6ad143460d40fd9b63fb4c514b75e883fb58315c22d46034eebebfb"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.693733 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" event={"ID":"b3f44377-91af-4adb-a377-12e5ede07d0c","Type":"ContainerDied","Data":"a7fef6443112b1ba72d74d4e4d1c9cdf4b6394c551f01c626a2dbc70b44162af"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.693858 4747 scope.go:117] "RemoveContainer" containerID="1e2b8b2cc7ca0853468047cdfdea184814f19b621d226849294abef3de03ab8b" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.693758 4747 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59967d9484-q65t7" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.696565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerStarted","Data":"8888085c082fb6e4d4a8c9025060bb93960c273ddccefa2bf3994b9950ee2cfa"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.698957 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" event={"ID":"03608488-f7a3-44c6-be14-3219075c7e8c","Type":"ContainerDied","Data":"11e134a6ab0ae118f928e8073c915b2e46593aa003c94739b4627b8db896e8a6"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.699159 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.703220 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerStarted","Data":"5f01e542dbff96d1173dff2c62a47aa6c1e96e5025c8db7316f5f64e4348388c"} Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.712147 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wspvt" podStartSLOduration=2.219942641 podStartE2EDuration="57.712108463s" podCreationTimestamp="2026-02-02 08:59:07 +0000 UTC" firstStartedPulling="2026-02-02 08:59:08.679874953 +0000 UTC m=+161.224213386" lastFinishedPulling="2026-02-02 09:00:04.172040775 +0000 UTC m=+216.716379208" observedRunningTime="2026-02-02 09:00:04.707344951 +0000 UTC m=+217.251683384" watchObservedRunningTime="2026-02-02 09:00:04.712108463 +0000 UTC m=+217.256446896" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.720316 4747 scope.go:117] "RemoveContainer" containerID="213793d2afd3a5cd3e79a337e009acaa6a6cf45c96924ce6fa67e543aace7b8b" Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.748138 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.751571 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-59967d9484-q65t7"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.763841 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.766111 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-67885fc5d8-c2fdk"] Feb 02 09:00:04 crc kubenswrapper[4747]: I0202 09:00:04.798977 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dpz98" podStartSLOduration=2.338492415 podStartE2EDuration="57.798961366s" podCreationTimestamp="2026-02-02 08:59:07 +0000 UTC" firstStartedPulling="2026-02-02 08:59:08.69266064 +0000 UTC m=+161.236999073" lastFinishedPulling="2026-02-02 09:00:04.153129591 +0000 UTC m=+216.697468024" observedRunningTime="2026-02-02 09:00:04.79713373 +0000 UTC m=+217.341472163" watchObservedRunningTime="2026-02-02 09:00:04.798961366 +0000 UTC 
m=+217.343299799" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.720454 4747 generic.go:334] "Generic (PLEG): container finished" podID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerID="8888085c082fb6e4d4a8c9025060bb93960c273ddccefa2bf3994b9950ee2cfa" exitCode=0 Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.720565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerDied","Data":"8888085c082fb6e4d4a8c9025060bb93960c273ddccefa2bf3994b9950ee2cfa"} Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.731624 4747 generic.go:334] "Generic (PLEG): container finished" podID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerID="5f01e542dbff96d1173dff2c62a47aa6c1e96e5025c8db7316f5f64e4348388c" exitCode=0 Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.732281 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerDied","Data":"5f01e542dbff96d1173dff2c62a47aa6c1e96e5025c8db7316f5f64e4348388c"} Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.900717 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.901825 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="registry-server" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.901896 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="registry-server" Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.901916 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3f44377-91af-4adb-a377-12e5ede07d0c" containerName="controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.901926 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3f44377-91af-4adb-a377-12e5ede07d0c" containerName="controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.901953 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" containerName="route-controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.901964 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" containerName="route-controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.901983 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="939ae80d-cdd6-4306-b7fd-7b222f530288" containerName="collect-profiles" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.901990 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="939ae80d-cdd6-4306-b7fd-7b222f530288" containerName="collect-profiles" Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.902002 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="extract-utilities" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902009 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="extract-utilities" Feb 02 09:00:05 crc kubenswrapper[4747]: E0202 09:00:05.902020 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" 
containerName="extract-content" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902026 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="extract-content" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902145 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3be7cd8-dbbb-4866-9f01-5e8aa31c695b" containerName="registry-server" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902165 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3f44377-91af-4adb-a377-12e5ede07d0c" containerName="controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902176 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" containerName="route-controller-manager" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902186 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="939ae80d-cdd6-4306-b7fd-7b222f530288" containerName="collect-profiles" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.902809 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.905200 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.906415 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.906504 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.906501 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.906617 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.906644 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.910366 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:05 crc kubenswrapper[4747]: I0202 09:00:05.916319 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.037218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.037587 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " 
pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.037627 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.037643 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.037665 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhpmx\" (UniqueName: \"kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.139135 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.139225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.139293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.139330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.139380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhpmx\" (UniqueName: \"kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.140468 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.140584 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.141017 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.144290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.162085 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhpmx\" (UniqueName: \"kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx\") pod \"controller-manager-9b947f799-kfzsz\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.219640 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.348644 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03608488-f7a3-44c6-be14-3219075c7e8c" path="/var/lib/kubelet/pods/03608488-f7a3-44c6-be14-3219075c7e8c/volumes" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.349413 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3f44377-91af-4adb-a377-12e5ede07d0c" path="/var/lib/kubelet/pods/b3f44377-91af-4adb-a377-12e5ede07d0c/volumes" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.619894 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:06 crc kubenswrapper[4747]: W0202 09:00:06.631395 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3e7c5e0_6a5d_401c_9b44_d05468fa98d4.slice/crio-2e1cc1f36676a68ea3f501599e5b7dbbfc9c9729ad2ff3813d9962ee06d1d092 WatchSource:0}: Error finding container 2e1cc1f36676a68ea3f501599e5b7dbbfc9c9729ad2ff3813d9962ee06d1d092: Status 404 returned error can't find the container with id 2e1cc1f36676a68ea3f501599e5b7dbbfc9c9729ad2ff3813d9962ee06d1d092 Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.738189 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerStarted","Data":"2444a5b17144cd4063182e041ce4792170592ac6f8e1e8c79a0a69201f11e87d"} Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.741054 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerStarted","Data":"acafef1c1423db88c87b95553872b6ecaae576a3bad1ac7111a8f2c8170a1d87"} Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.742119 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" event={"ID":"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4","Type":"ContainerStarted","Data":"2e1cc1f36676a68ea3f501599e5b7dbbfc9c9729ad2ff3813d9962ee06d1d092"} Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.755248 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bmlc4" podStartSLOduration=2.960106097 podStartE2EDuration="1m0.755229663s" podCreationTimestamp="2026-02-02 08:59:06 +0000 UTC" firstStartedPulling="2026-02-02 08:59:08.69736096 +0000 UTC m=+161.241699403" lastFinishedPulling="2026-02-02 09:00:06.492484536 +0000 UTC m=+219.036822969" observedRunningTime="2026-02-02 09:00:06.753287253 +0000 UTC m=+219.297625706" watchObservedRunningTime="2026-02-02 09:00:06.755229663 +0000 UTC m=+219.299568096" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.771074 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p8jt9" podStartSLOduration=4.146903333 podStartE2EDuration="56.771057188s" podCreationTimestamp="2026-02-02 08:59:10 +0000 UTC" firstStartedPulling="2026-02-02 08:59:13.7542431 +0000 UTC m=+166.298581533" lastFinishedPulling="2026-02-02 09:00:06.378396955 +0000 UTC m=+218.922735388" observedRunningTime="2026-02-02 09:00:06.768037601 +0000 UTC m=+219.312376034" watchObservedRunningTime="2026-02-02 09:00:06.771057188 +0000 UTC 
m=+219.315395621" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.901277 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.901868 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.905318 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.905519 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.906237 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.906387 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.906801 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.912624 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:06 crc kubenswrapper[4747]: I0202 09:00:06.915111 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.049977 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.050058 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.050089 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.050123 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lfxx\" (UniqueName: \"kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " 
pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.151833 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.152198 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.152221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.152247 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lfxx\" (UniqueName: \"kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.154400 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.154481 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.158290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.168670 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lfxx\" (UniqueName: \"kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx\") pod \"route-controller-manager-75d4944dc7-2t7rb\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc 
kubenswrapper[4747]: I0202 09:00:07.222126 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.272738 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.272781 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.425902 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:07 crc kubenswrapper[4747]: W0202 09:00:07.436977 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a3de2f8_4721_4746_9a12_2047ae2ff972.slice/crio-2e866cb0246c34b766edf07c4fd0fcdd0aa370a253c6a456360165d35ebf5398 WatchSource:0}: Error finding container 2e866cb0246c34b766edf07c4fd0fcdd0aa370a253c6a456360165d35ebf5398: Status 404 returned error can't find the container with id 2e866cb0246c34b766edf07c4fd0fcdd0aa370a253c6a456360165d35ebf5398 Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.495527 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.495585 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.550248 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.672139 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.672186 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.710907 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.752411 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" event={"ID":"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4","Type":"ContainerStarted","Data":"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89"} Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.753866 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.755744 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" event={"ID":"8a3de2f8-4721-4746-9a12-2047ae2ff972","Type":"ContainerStarted","Data":"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778"} Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.755821 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" event={"ID":"8a3de2f8-4721-4746-9a12-2047ae2ff972","Type":"ContainerStarted","Data":"2e866cb0246c34b766edf07c4fd0fcdd0aa370a253c6a456360165d35ebf5398"} Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.760818 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.773563 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" podStartSLOduration=5.7735403739999995 podStartE2EDuration="5.773540374s" podCreationTimestamp="2026-02-02 09:00:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:07.771581484 +0000 UTC m=+220.315919937" watchObservedRunningTime="2026-02-02 09:00:07.773540374 +0000 UTC m=+220.317878797" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.809863 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.814259 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" podStartSLOduration=5.814244306 podStartE2EDuration="5.814244306s" podCreationTimestamp="2026-02-02 09:00:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:07.811254009 +0000 UTC m=+220.355592452" watchObservedRunningTime="2026-02-02 09:00:07.814244306 +0000 UTC m=+220.358582739" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.860790 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.861799 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:07 crc kubenswrapper[4747]: I0202 09:00:07.899015 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:08 crc kubenswrapper[4747]: I0202 09:00:08.316681 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-bmlc4" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="registry-server" probeResult="failure" output=< Feb 02 09:00:08 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:00:08 crc kubenswrapper[4747]: > Feb 02 09:00:08 crc kubenswrapper[4747]: I0202 09:00:08.404471 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 09:00:08 crc kubenswrapper[4747]: I0202 09:00:08.762339 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:08 crc kubenswrapper[4747]: I0202 09:00:08.768170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.269982 4747 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.270040 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.317279 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.772176 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pzgvn" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="registry-server" containerID="cri-o://33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80" gracePeriod=2 Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.821453 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:09 crc kubenswrapper[4747]: I0202 09:00:09.825688 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.395386 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.495386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb5lh\" (UniqueName: \"kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh\") pod \"2e8d277b-4e88-425b-a33f-3d657972fd59\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.495495 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content\") pod \"2e8d277b-4e88-425b-a33f-3d657972fd59\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.495602 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities\") pod \"2e8d277b-4e88-425b-a33f-3d657972fd59\" (UID: \"2e8d277b-4e88-425b-a33f-3d657972fd59\") " Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.497246 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities" (OuterVolumeSpecName: "utilities") pod "2e8d277b-4e88-425b-a33f-3d657972fd59" (UID: "2e8d277b-4e88-425b-a33f-3d657972fd59"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.503439 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh" (OuterVolumeSpecName: "kube-api-access-zb5lh") pod "2e8d277b-4e88-425b-a33f-3d657972fd59" (UID: "2e8d277b-4e88-425b-a33f-3d657972fd59"). InnerVolumeSpecName "kube-api-access-zb5lh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.565496 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e8d277b-4e88-425b-a33f-3d657972fd59" (UID: "2e8d277b-4e88-425b-a33f-3d657972fd59"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.597570 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb5lh\" (UniqueName: \"kubernetes.io/projected/2e8d277b-4e88-425b-a33f-3d657972fd59-kube-api-access-zb5lh\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.597655 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.597670 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e8d277b-4e88-425b-a33f-3d657972fd59-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.779479 4747 generic.go:334] "Generic (PLEG): container finished" podID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerID="33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80" exitCode=0 Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.779532 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerDied","Data":"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80"} Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.779552 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pzgvn" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.779576 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pzgvn" event={"ID":"2e8d277b-4e88-425b-a33f-3d657972fd59","Type":"ContainerDied","Data":"d72af6d6b145d56377a65b0862ac16d0579821ab4b6556111dce4742805b790c"} Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.779605 4747 scope.go:117] "RemoveContainer" containerID="33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.795715 4747 scope.go:117] "RemoveContainer" containerID="fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.807720 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.811119 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.813429 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pzgvn"] Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.843661 4747 scope.go:117] "RemoveContainer" containerID="8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.857247 4747 scope.go:117] "RemoveContainer" containerID="33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80" Feb 02 09:00:10 crc kubenswrapper[4747]: E0202 09:00:10.857782 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80\": container with ID starting with 33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80 not found: ID does not exist" containerID="33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.857821 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80"} err="failed to get container status \"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80\": rpc error: code = NotFound desc = could not find container \"33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80\": container with ID starting with 33a75159e3f403740f76f867cd18aec0dd02b72b3c6bb16ea5f32eed7ac42d80 not found: ID does not exist" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.857849 4747 scope.go:117] "RemoveContainer" containerID="fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be" Feb 02 09:00:10 crc kubenswrapper[4747]: E0202 09:00:10.858179 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be\": container with ID starting with fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be not found: ID does not exist" containerID="fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.858226 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be"} err="failed to get container status \"fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be\": rpc error: code = NotFound desc = could not find container \"fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be\": container with ID starting with fadc292de5657735740564db9803aacfb00635ef80b8f98edd4de53863b3a6be not found: ID does not exist" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.858254 4747 scope.go:117] "RemoveContainer" containerID="8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf" Feb 02 09:00:10 crc kubenswrapper[4747]: E0202 09:00:10.858469 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf\": container with ID starting with 8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf not found: ID does not exist" containerID="8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.858492 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf"} err="failed to get container status \"8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf\": rpc error: code = NotFound desc = could not find container \"8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf\": container with ID starting with 8e3d78e277d713f04493a2b024fc3db84c69e316c9d7e7502014d916df173fbf not found: ID does not exist" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.861956 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 09:00:10 crc kubenswrapper[4747]: I0202 09:00:10.862090 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 09:00:11 crc kubenswrapper[4747]: I0202 09:00:11.786783 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wspvt" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="registry-server" containerID="cri-o://770def2af9fc346e033e3b5664c7dcb87cfe372ae1f3439bb5d233e1fdd891d7" gracePeriod=2 Feb 02 09:00:11 crc kubenswrapper[4747]: I0202 09:00:11.905314 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p8jt9" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="registry-server" probeResult="failure" output=< Feb 02 09:00:11 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:00:11 crc kubenswrapper[4747]: > Feb 02 09:00:12 crc kubenswrapper[4747]: I0202 09:00:12.349810 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" path="/var/lib/kubelet/pods/2e8d277b-4e88-425b-a33f-3d657972fd59/volumes" Feb 02 09:00:12 crc kubenswrapper[4747]: I0202 09:00:12.798005 4747 generic.go:334] "Generic (PLEG): container finished" podID="7935ede6-8dc0-421f-9296-fbd017061975" containerID="770def2af9fc346e033e3b5664c7dcb87cfe372ae1f3439bb5d233e1fdd891d7" exitCode=0 Feb 02 09:00:12 crc kubenswrapper[4747]: I0202 09:00:12.798060 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" 
event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerDied","Data":"770def2af9fc346e033e3b5664c7dcb87cfe372ae1f3439bb5d233e1fdd891d7"} Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.097770 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.232116 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz9sn\" (UniqueName: \"kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn\") pod \"7935ede6-8dc0-421f-9296-fbd017061975\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.232193 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content\") pod \"7935ede6-8dc0-421f-9296-fbd017061975\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.232223 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities\") pod \"7935ede6-8dc0-421f-9296-fbd017061975\" (UID: \"7935ede6-8dc0-421f-9296-fbd017061975\") " Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.233098 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities" (OuterVolumeSpecName: "utilities") pod "7935ede6-8dc0-421f-9296-fbd017061975" (UID: "7935ede6-8dc0-421f-9296-fbd017061975"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.239591 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn" (OuterVolumeSpecName: "kube-api-access-vz9sn") pod "7935ede6-8dc0-421f-9296-fbd017061975" (UID: "7935ede6-8dc0-421f-9296-fbd017061975"). InnerVolumeSpecName "kube-api-access-vz9sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.276725 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7935ede6-8dc0-421f-9296-fbd017061975" (UID: "7935ede6-8dc0-421f-9296-fbd017061975"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.333370 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz9sn\" (UniqueName: \"kubernetes.io/projected/7935ede6-8dc0-421f-9296-fbd017061975-kube-api-access-vz9sn\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.333609 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.333620 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7935ede6-8dc0-421f-9296-fbd017061975-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.809636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wspvt" event={"ID":"7935ede6-8dc0-421f-9296-fbd017061975","Type":"ContainerDied","Data":"63328b0f03c5d4e55e52c832fe104add0d2f73ed120e3ce4487366ac61219464"} Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.809712 4747 scope.go:117] "RemoveContainer" containerID="770def2af9fc346e033e3b5664c7dcb87cfe372ae1f3439bb5d233e1fdd891d7" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.809964 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wspvt" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.828604 4747 scope.go:117] "RemoveContainer" containerID="9b447d01cf71e8cd37c4d8c1f6455d34dc1ff906853f2538cdca135df95f4140" Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.842780 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.845873 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wspvt"] Feb 02 09:00:13 crc kubenswrapper[4747]: I0202 09:00:13.866637 4747 scope.go:117] "RemoveContainer" containerID="61aeaa3d0716225c85f6ea227e5b6fb037196627335ac9e811501583e4e47972" Feb 02 09:00:14 crc kubenswrapper[4747]: I0202 09:00:14.346661 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7935ede6-8dc0-421f-9296-fbd017061975" path="/var/lib/kubelet/pods/7935ede6-8dc0-421f-9296-fbd017061975/volumes" Feb 02 09:00:17 crc kubenswrapper[4747]: I0202 09:00:17.321482 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:17 crc kubenswrapper[4747]: I0202 09:00:17.364061 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:17 crc kubenswrapper[4747]: I0202 09:00:17.533756 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:18 crc kubenswrapper[4747]: I0202 09:00:18.693322 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dcbr8"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.276561 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.277381 4747 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/certified-operators-dpz98" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="registry-server" containerID="cri-o://64e53759c6ad143460d40fd9b63fb4c514b75e883fb58315c22d46034eebebfb" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.283980 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.284488 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bmlc4" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="registry-server" containerID="cri-o://2444a5b17144cd4063182e041ce4792170592ac6f8e1e8c79a0a69201f11e87d" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.288975 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.290029 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" containerID="cri-o://4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.296619 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.298437 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5z2cn" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="registry-server" containerID="cri-o://04e7ea152af00cbbd10d71b1824c6bbfa83760a6b5f917a02c231a764ebd8d43" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.308888 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.309253 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fpzd5" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="registry-server" containerID="cri-o://d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.352722 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xs49m"] Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.352952 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="extract-content" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.352967 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="extract-content" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.352980 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="extract-content" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.352986 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="extract-content" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.352994 4747 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="extract-utilities" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353000 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="extract-utilities" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.353013 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353019 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.353030 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="extract-utilities" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353036 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="extract-utilities" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.353043 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353048 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353138 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7935ede6-8dc0-421f-9296-fbd017061975" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353149 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e8d277b-4e88-425b-a33f-3d657972fd59" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353425 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xs49m"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353494 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.353868 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.354127 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p8jt9" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="registry-server" containerID="cri-o://acafef1c1423db88c87b95553872b6ecaae576a3bad1ac7111a8f2c8170a1d87" gracePeriod=30 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.356700 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pw275 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": read tcp 10.217.0.2:56770->10.217.0.36:8080: read: connection reset by peer" start-of-body= Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.356735 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": read tcp 10.217.0.2:56770->10.217.0.36:8080: read: connection reset by peer" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.427446 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.427494 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.427711 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8fql\" (UniqueName: \"kubernetes.io/projected/0f85f49d-a5d1-4b38-965c-e02d64134491-kube-api-access-r8fql\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.490553 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b is running failed: container process not found" containerID="d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.490897 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b is running failed: container process 
not found" containerID="d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.491846 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b is running failed: container process not found" containerID="d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.491881 4747 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-fpzd5" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="registry-server" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.518330 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.518410 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.518462 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.519219 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.519285 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c" gracePeriod=600 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.528861 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8fql\" (UniqueName: \"kubernetes.io/projected/0f85f49d-a5d1-4b38-965c-e02d64134491-kube-api-access-r8fql\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.528928 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.528979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.530473 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.537619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f85f49d-a5d1-4b38-965c-e02d64134491-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.551703 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8fql\" (UniqueName: \"kubernetes.io/projected/0f85f49d-a5d1-4b38-965c-e02d64134491-kube-api-access-r8fql\") pod \"marketplace-operator-79b997595-xs49m\" (UID: \"0f85f49d-a5d1-4b38-965c-e02d64134491\") " pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.851443 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.856304 4747 generic.go:334] "Generic (PLEG): container finished" podID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerID="64e53759c6ad143460d40fd9b63fb4c514b75e883fb58315c22d46034eebebfb" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.856368 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerDied","Data":"64e53759c6ad143460d40fd9b63fb4c514b75e883fb58315c22d46034eebebfb"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.861518 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.861790 4747 generic.go:334] "Generic (PLEG): container finished" podID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerID="2444a5b17144cd4063182e041ce4792170592ac6f8e1e8c79a0a69201f11e87d" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.861836 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerDied","Data":"2444a5b17144cd4063182e041ce4792170592ac6f8e1e8c79a0a69201f11e87d"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.866014 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.866161 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.870643 4747 generic.go:334] "Generic (PLEG): container finished" podID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerID="4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.870697 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" event={"ID":"42f4cc75-06b6-48f7-95cb-915be0b67e72","Type":"ContainerDied","Data":"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.870745 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" event={"ID":"42f4cc75-06b6-48f7-95cb-915be0b67e72","Type":"ContainerDied","Data":"d2c619408f27133ac2f9132f6678f1c88f1988fbf904863994aee9b4cfaa4250"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.870770 4747 scope.go:117] "RemoveContainer" containerID="4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.870995 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pw275" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.878231 4747 generic.go:334] "Generic (PLEG): container finished" podID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerID="acafef1c1423db88c87b95553872b6ecaae576a3bad1ac7111a8f2c8170a1d87" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.878315 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerDied","Data":"acafef1c1423db88c87b95553872b6ecaae576a3bad1ac7111a8f2c8170a1d87"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.880134 4747 generic.go:334] "Generic (PLEG): container finished" podID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerID="04e7ea152af00cbbd10d71b1824c6bbfa83760a6b5f917a02c231a764ebd8d43" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.880188 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerDied","Data":"04e7ea152af00cbbd10d71b1824c6bbfa83760a6b5f917a02c231a764ebd8d43"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.889381 4747 generic.go:334] "Generic (PLEG): container finished" podID="557f0623-5fe6-48cd-a958-88330d792ba8" containerID="d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" exitCode=0 Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.889424 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerDied","Data":"d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b"} Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.919622 4747 scope.go:117] "RemoveContainer" containerID="4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7" Feb 02 09:00:20 crc kubenswrapper[4747]: E0202 09:00:20.922642 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7\": container with ID starting with 4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7 not found: ID does not exist" containerID="4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.922670 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7"} err="failed to get container status \"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7\": rpc error: code = NotFound desc = could not find container \"4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7\": container with ID starting with 4e1e6896ce4d4b2bbee2eaf30e3e4620372eebfbd8c6133a3406220c2d4a5ab7 not found: ID does not exist" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.935037 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics\") pod \"42f4cc75-06b6-48f7-95cb-915be0b67e72\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.935075 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-65p6n\" (UniqueName: \"kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n\") pod \"42f4cc75-06b6-48f7-95cb-915be0b67e72\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.935106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca\") pod \"42f4cc75-06b6-48f7-95cb-915be0b67e72\" (UID: \"42f4cc75-06b6-48f7-95cb-915be0b67e72\") " Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.935801 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "42f4cc75-06b6-48f7-95cb-915be0b67e72" (UID: "42f4cc75-06b6-48f7-95cb-915be0b67e72"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.949190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "42f4cc75-06b6-48f7-95cb-915be0b67e72" (UID: "42f4cc75-06b6-48f7-95cb-915be0b67e72"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:20 crc kubenswrapper[4747]: I0202 09:00:20.959217 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n" (OuterVolumeSpecName: "kube-api-access-65p6n") pod "42f4cc75-06b6-48f7-95cb-915be0b67e72" (UID: "42f4cc75-06b6-48f7-95cb-915be0b67e72"). InnerVolumeSpecName "kube-api-access-65p6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.036966 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.037016 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65p6n\" (UniqueName: \"kubernetes.io/projected/42f4cc75-06b6-48f7-95cb-915be0b67e72-kube-api-access-65p6n\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.037029 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/42f4cc75-06b6-48f7-95cb-915be0b67e72-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.187735 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.205798 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.226544 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.229482 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pw275"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.237720 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.237903 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.314367 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354417 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities\") pod \"87ba3296-51e6-4641-acbc-e24b60ffe91c\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354453 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities\") pod \"557f0623-5fe6-48cd-a958-88330d792ba8\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354483 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4kzn\" (UniqueName: \"kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn\") pod \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354507 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content\") pod \"87ba3296-51e6-4641-acbc-e24b60ffe91c\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354527 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content\") pod \"557f0623-5fe6-48cd-a958-88330d792ba8\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354546 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities\") pod \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sswdn\" (UniqueName: \"kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn\") pod \"557f0623-5fe6-48cd-a958-88330d792ba8\" (UID: \"557f0623-5fe6-48cd-a958-88330d792ba8\") " Feb 02 09:00:21 crc 
kubenswrapper[4747]: I0202 09:00:21.354623 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq2nn\" (UniqueName: \"kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn\") pod \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354654 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v569z\" (UniqueName: \"kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z\") pod \"87ba3296-51e6-4641-acbc-e24b60ffe91c\" (UID: \"87ba3296-51e6-4641-acbc-e24b60ffe91c\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content\") pod \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\" (UID: \"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354710 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content\") pod \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.354726 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities\") pod \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\" (UID: \"0cdcfe28-8ae4-4938-8d64-d8255b92cf90\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.355746 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities" (OuterVolumeSpecName: "utilities") pod "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" (UID: "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.355782 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities" (OuterVolumeSpecName: "utilities") pod "0cdcfe28-8ae4-4938-8d64-d8255b92cf90" (UID: "0cdcfe28-8ae4-4938-8d64-d8255b92cf90"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.356413 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities" (OuterVolumeSpecName: "utilities") pod "87ba3296-51e6-4641-acbc-e24b60ffe91c" (UID: "87ba3296-51e6-4641-acbc-e24b60ffe91c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.362896 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn" (OuterVolumeSpecName: "kube-api-access-j4kzn") pod "0cdcfe28-8ae4-4938-8d64-d8255b92cf90" (UID: "0cdcfe28-8ae4-4938-8d64-d8255b92cf90"). InnerVolumeSpecName "kube-api-access-j4kzn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.363109 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z" (OuterVolumeSpecName: "kube-api-access-v569z") pod "87ba3296-51e6-4641-acbc-e24b60ffe91c" (UID: "87ba3296-51e6-4641-acbc-e24b60ffe91c"). InnerVolumeSpecName "kube-api-access-v569z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.363870 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities" (OuterVolumeSpecName: "utilities") pod "557f0623-5fe6-48cd-a958-88330d792ba8" (UID: "557f0623-5fe6-48cd-a958-88330d792ba8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.364025 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn" (OuterVolumeSpecName: "kube-api-access-sswdn") pod "557f0623-5fe6-48cd-a958-88330d792ba8" (UID: "557f0623-5fe6-48cd-a958-88330d792ba8"). InnerVolumeSpecName "kube-api-access-sswdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.366195 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn" (OuterVolumeSpecName: "kube-api-access-cq2nn") pod "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" (UID: "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989"). InnerVolumeSpecName "kube-api-access-cq2nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.455876 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities\") pod \"52602e18-9a62-4ee3-bfa3-530eb601caa9\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.456066 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxb2c\" (UniqueName: \"kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c\") pod \"52602e18-9a62-4ee3-bfa3-530eb601caa9\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.456259 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content\") pod \"52602e18-9a62-4ee3-bfa3-530eb601caa9\" (UID: \"52602e18-9a62-4ee3-bfa3-530eb601caa9\") " Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.457193 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities" (OuterVolumeSpecName: "utilities") pod "52602e18-9a62-4ee3-bfa3-530eb601caa9" (UID: "52602e18-9a62-4ee3-bfa3-530eb601caa9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.461077 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87ba3296-51e6-4641-acbc-e24b60ffe91c" (UID: "87ba3296-51e6-4641-acbc-e24b60ffe91c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.465094 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c" (OuterVolumeSpecName: "kube-api-access-vxb2c") pod "52602e18-9a62-4ee3-bfa3-530eb601caa9" (UID: "52602e18-9a62-4ee3-bfa3-530eb601caa9"). InnerVolumeSpecName "kube-api-access-vxb2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468808 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468829 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468840 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4kzn\" (UniqueName: \"kubernetes.io/projected/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-kube-api-access-j4kzn\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468849 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ba3296-51e6-4641-acbc-e24b60ffe91c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468858 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468866 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sswdn\" (UniqueName: \"kubernetes.io/projected/557f0623-5fe6-48cd-a958-88330d792ba8-kube-api-access-sswdn\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468874 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxb2c\" (UniqueName: \"kubernetes.io/projected/52602e18-9a62-4ee3-bfa3-530eb601caa9-kube-api-access-vxb2c\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468883 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq2nn\" (UniqueName: \"kubernetes.io/projected/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-kube-api-access-cq2nn\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468891 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v569z\" (UniqueName: \"kubernetes.io/projected/87ba3296-51e6-4641-acbc-e24b60ffe91c-kube-api-access-v569z\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468900 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.468909 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.499357 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cdcfe28-8ae4-4938-8d64-d8255b92cf90" (UID: "0cdcfe28-8ae4-4938-8d64-d8255b92cf90"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.500705 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52602e18-9a62-4ee3-bfa3-530eb601caa9" (UID: "52602e18-9a62-4ee3-bfa3-530eb601caa9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.559907 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "557f0623-5fe6-48cd-a958-88330d792ba8" (UID: "557f0623-5fe6-48cd-a958-88330d792ba8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.566250 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" (UID: "30faf8f7-e02e-4fbd-b23b-6ad3c9e71989"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.566414 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xs49m"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.570020 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557f0623-5fe6-48cd-a958-88330d792ba8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.570044 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52602e18-9a62-4ee3-bfa3-530eb601caa9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.570053 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.570062 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cdcfe28-8ae4-4938-8d64-d8255b92cf90-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.897582 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.902474 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jt9" event={"ID":"30faf8f7-e02e-4fbd-b23b-6ad3c9e71989","Type":"ContainerDied","Data":"a895938f8cb8b65e4a8cc5d4c218adcd82a1502a517361f29b4ccc8ccd40ecee"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.902518 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jt9" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.902529 4747 scope.go:117] "RemoveContainer" containerID="acafef1c1423db88c87b95553872b6ecaae576a3bad1ac7111a8f2c8170a1d87" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.903991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" event={"ID":"0f85f49d-a5d1-4b38-965c-e02d64134491","Type":"ContainerStarted","Data":"cb3569e899a96ed50841600468d24a2f9414b3e57f8de536dd9707b8510b0ed7"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.904029 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" event={"ID":"0f85f49d-a5d1-4b38-965c-e02d64134491","Type":"ContainerStarted","Data":"302ba2d1b1ee8509ece0c5a6362aac0f8c545a942c5f7217627e0dfd87b7b79d"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.904489 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.906093 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xs49m container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.906138 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" podUID="0f85f49d-a5d1-4b38-965c-e02d64134491" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.906758 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5z2cn" event={"ID":"52602e18-9a62-4ee3-bfa3-530eb601caa9","Type":"ContainerDied","Data":"4a56f6dfc2b08619d58446b67998cc8e596f2108e610c38296c10836c0a03fbf"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.906830 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5z2cn" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.909854 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fpzd5" event={"ID":"557f0623-5fe6-48cd-a958-88330d792ba8","Type":"ContainerDied","Data":"301b44488a09d51ab18526215f5968b577b2d89c67a08204c0f66aedb44b2d74"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.909961 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fpzd5" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.916806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpz98" event={"ID":"87ba3296-51e6-4641-acbc-e24b60ffe91c","Type":"ContainerDied","Data":"51abeb0e0409ff9aef304fa69914309d28c06183f6d84d04c5518ceead739a22"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.916912 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dpz98" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.923036 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bmlc4" event={"ID":"0cdcfe28-8ae4-4938-8d64-d8255b92cf90","Type":"ContainerDied","Data":"ac369f432dda821d974d7ba0d584819faf5dcd9ca30b16da771469182c883986"} Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.923099 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bmlc4" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.928248 4747 scope.go:117] "RemoveContainer" containerID="5f01e542dbff96d1173dff2c62a47aa6c1e96e5025c8db7316f5f64e4348388c" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.942973 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" podStartSLOduration=1.9429522399999999 podStartE2EDuration="1.94295224s" podCreationTimestamp="2026-02-02 09:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:21.940286032 +0000 UTC m=+234.484624465" watchObservedRunningTime="2026-02-02 09:00:21.94295224 +0000 UTC m=+234.487290673" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.952959 4747 scope.go:117] "RemoveContainer" containerID="2bf33c8d468ce2b262e859b0d2c4598eeb2b09a697c1225f071f4cff05c7c4e2" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.959201 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.962926 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p8jt9"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.986157 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.990428 4747 scope.go:117] "RemoveContainer" containerID="04e7ea152af00cbbd10d71b1824c6bbfa83760a6b5f917a02c231a764ebd8d43" Feb 02 09:00:21 crc kubenswrapper[4747]: I0202 09:00:21.999253 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fpzd5"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.003192 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.007099 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bmlc4"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.016993 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.018155 4747 scope.go:117] "RemoveContainer" containerID="91d440e894facd5f0c8be86275bbe0ffd30ca23aa07c346c2f02dc2de19b6ea3" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.019792 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5z2cn"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.035097 4747 scope.go:117] "RemoveContainer" containerID="87af9c722640eb65a3f8c932ee5aad89d4e2246ca747a004f23a0de6eed8f8fd" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.036059 4747 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.040747 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dpz98"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.056206 4747 scope.go:117] "RemoveContainer" containerID="d1c94d5337f19cb82801c060966e881a8a42196516976213d5b83c514046e27b" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.071224 4747 scope.go:117] "RemoveContainer" containerID="fda4247cafdc9448d1856224d2c914875d950c718db80f06ea3db2730ac3ab1d" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.091858 4747 scope.go:117] "RemoveContainer" containerID="6eb38d8fb2c4a8d7e20479c997401ce459bc92c121ef2017b359432cb97e68f2" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.105005 4747 scope.go:117] "RemoveContainer" containerID="64e53759c6ad143460d40fd9b63fb4c514b75e883fb58315c22d46034eebebfb" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.129788 4747 scope.go:117] "RemoveContainer" containerID="f213576c634d6a84ded43ecaf335c00b7ea2aedfe1a0a115a1fc29dccff98f5b" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.145450 4747 scope.go:117] "RemoveContainer" containerID="8104835b4258d61d7dcc0b06b3453714df76b3eadaf4822ab2d16f35832ad542" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.175205 4747 scope.go:117] "RemoveContainer" containerID="2444a5b17144cd4063182e041ce4792170592ac6f8e1e8c79a0a69201f11e87d" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.190216 4747 scope.go:117] "RemoveContainer" containerID="8888085c082fb6e4d4a8c9025060bb93960c273ddccefa2bf3994b9950ee2cfa" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.210796 4747 scope.go:117] "RemoveContainer" containerID="c50f83d321194b14df33998a3f1fc355f885242cbcbbab4a20337bb1bb53af5d" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.326318 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.326589 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" podUID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" containerName="controller-manager" containerID="cri-o://01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89" gracePeriod=30 Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.345630 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" path="/var/lib/kubelet/pods/0cdcfe28-8ae4-4938-8d64-d8255b92cf90/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.346458 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" path="/var/lib/kubelet/pods/30faf8f7-e02e-4fbd-b23b-6ad3c9e71989/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.347323 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" path="/var/lib/kubelet/pods/42f4cc75-06b6-48f7-95cb-915be0b67e72/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.348634 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" path="/var/lib/kubelet/pods/52602e18-9a62-4ee3-bfa3-530eb601caa9/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.350853 4747 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" path="/var/lib/kubelet/pods/557f0623-5fe6-48cd-a958-88330d792ba8/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.352193 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" path="/var/lib/kubelet/pods/87ba3296-51e6-4641-acbc-e24b60ffe91c/volumes" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.429478 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.430024 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" podUID="8a3de2f8-4721-4746-9a12-2047ae2ff972" containerName="route-controller-manager" containerID="cri-o://328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778" gracePeriod=30 Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615155 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6bxvq"] Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615800 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615819 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615829 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615835 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615847 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615855 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615862 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615869 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615876 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615884 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615892 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615899 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" 
containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615911 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615918 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615928 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615952 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615963 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615970 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.615980 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.615987 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616001 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616008 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616018 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616025 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616035 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616041 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616049 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616056 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616066 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616073 4747 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="extract-utilities" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.616083 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616089 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="extract-content" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616186 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ba3296-51e6-4641-acbc-e24b60ffe91c" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616197 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="557f0623-5fe6-48cd-a958-88330d792ba8" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616205 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="30faf8f7-e02e-4fbd-b23b-6ad3c9e71989" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616214 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cdcfe28-8ae4-4938-8d64-d8255b92cf90" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616225 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="52602e18-9a62-4ee3-bfa3-530eb601caa9" containerName="registry-server" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.616235 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f4cc75-06b6-48f7-95cb-915be0b67e72" containerName="marketplace-operator" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.617039 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.620499 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.632280 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6bxvq"] Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.790602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-utilities\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.790679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk5gk\" (UniqueName: \"kubernetes.io/projected/a977f70f-0d8f-4480-be1f-0b48d191b054-kube-api-access-kk5gk\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.790711 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-catalog-content\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.891979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-utilities\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.892102 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk5gk\" (UniqueName: \"kubernetes.io/projected/a977f70f-0d8f-4480-be1f-0b48d191b054-kube-api-access-kk5gk\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.892129 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-catalog-content\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.892918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-catalog-content\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.892978 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a977f70f-0d8f-4480-be1f-0b48d191b054-utilities\") pod \"certified-operators-6bxvq\" (UID: 
\"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.905455 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.910415 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.912153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk5gk\" (UniqueName: \"kubernetes.io/projected/a977f70f-0d8f-4480-be1f-0b48d191b054-kube-api-access-kk5gk\") pod \"certified-operators-6bxvq\" (UID: \"a977f70f-0d8f-4480-be1f-0b48d191b054\") " pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.931341 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" containerID="01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89" exitCode=0 Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.931393 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.931434 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" event={"ID":"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4","Type":"ContainerDied","Data":"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89"} Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.931483 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9b947f799-kfzsz" event={"ID":"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4","Type":"ContainerDied","Data":"2e1cc1f36676a68ea3f501599e5b7dbbfc9c9729ad2ff3813d9962ee06d1d092"} Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.931508 4747 scope.go:117] "RemoveContainer" containerID="01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.934860 4747 generic.go:334] "Generic (PLEG): container finished" podID="8a3de2f8-4721-4746-9a12-2047ae2ff972" containerID="328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778" exitCode=0 Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.934887 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.934952 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" event={"ID":"8a3de2f8-4721-4746-9a12-2047ae2ff972","Type":"ContainerDied","Data":"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778"} Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.934991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" event={"ID":"8a3de2f8-4721-4746-9a12-2047ae2ff972","Type":"ContainerDied","Data":"2e866cb0246c34b766edf07c4fd0fcdd0aa370a253c6a456360165d35ebf5398"} Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.934961 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.944815 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xs49m" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.965496 4747 scope.go:117] "RemoveContainer" containerID="01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.968292 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89\": container with ID starting with 01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89 not found: ID does not exist" containerID="01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.968348 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89"} err="failed to get container status \"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89\": rpc error: code = NotFound desc = could not find container \"01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89\": container with ID starting with 01a560edb1fd43c70e1959a4ed43c0a815e469564001bac15101816c5abfea89 not found: ID does not exist" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.968390 4747 scope.go:117] "RemoveContainer" containerID="328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993135 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhpmx\" (UniqueName: \"kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx\") pod \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert\") pod \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993209 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert\") pod \"8a3de2f8-4721-4746-9a12-2047ae2ff972\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993240 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config\") pod \"8a3de2f8-4721-4746-9a12-2047ae2ff972\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993263 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lfxx\" (UniqueName: \"kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx\") pod \"8a3de2f8-4721-4746-9a12-2047ae2ff972\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 
09:00:22.993296 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles\") pod \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993327 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config\") pod \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993351 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca\") pod \"8a3de2f8-4721-4746-9a12-2047ae2ff972\" (UID: \"8a3de2f8-4721-4746-9a12-2047ae2ff972\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.993375 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca\") pod \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\" (UID: \"b3e7c5e0-6a5d-401c-9b44-d05468fa98d4\") " Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.996353 4747 scope.go:117] "RemoveContainer" containerID="328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.996586 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca" (OuterVolumeSpecName: "client-ca") pod "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" (UID: "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.997998 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx" (OuterVolumeSpecName: "kube-api-access-qhpmx") pod "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" (UID: "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4"). InnerVolumeSpecName "kube-api-access-qhpmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.998221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config" (OuterVolumeSpecName: "config") pod "8a3de2f8-4721-4746-9a12-2047ae2ff972" (UID: "8a3de2f8-4721-4746-9a12-2047ae2ff972"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:22 crc kubenswrapper[4747]: E0202 09:00:22.998441 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778\": container with ID starting with 328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778 not found: ID does not exist" containerID="328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.998469 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778"} err="failed to get container status \"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778\": rpc error: code = NotFound desc = could not find container \"328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778\": container with ID starting with 328c2c028a1302c5abb4852f439a087874a194d88dc7a860e93c2012e5a78778 not found: ID does not exist" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.998562 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca" (OuterVolumeSpecName: "client-ca") pod "8a3de2f8-4721-4746-9a12-2047ae2ff972" (UID: "8a3de2f8-4721-4746-9a12-2047ae2ff972"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.998688 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config" (OuterVolumeSpecName: "config") pod "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" (UID: "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:22 crc kubenswrapper[4747]: I0202 09:00:22.999854 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" (UID: "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.000170 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8a3de2f8-4721-4746-9a12-2047ae2ff972" (UID: "8a3de2f8-4721-4746-9a12-2047ae2ff972"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.003516 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" (UID: "b3e7c5e0-6a5d-401c-9b44-d05468fa98d4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.003890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx" (OuterVolumeSpecName: "kube-api-access-5lfxx") pod "8a3de2f8-4721-4746-9a12-2047ae2ff972" (UID: "8a3de2f8-4721-4746-9a12-2047ae2ff972"). InnerVolumeSpecName "kube-api-access-5lfxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.094927 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhpmx\" (UniqueName: \"kubernetes.io/projected/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-kube-api-access-qhpmx\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.094988 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.094998 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a3de2f8-4721-4746-9a12-2047ae2ff972-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095008 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095292 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lfxx\" (UniqueName: \"kubernetes.io/projected/8a3de2f8-4721-4746-9a12-2047ae2ff972-kube-api-access-5lfxx\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095305 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095315 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095342 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a3de2f8-4721-4746-9a12-2047ae2ff972-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.095539 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.177945 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6bxvq"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.261107 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.263949 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-9b947f799-kfzsz"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.290301 4747 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.294189 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75d4944dc7-2t7rb"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.614760 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8kk9w"] Feb 02 09:00:23 crc kubenswrapper[4747]: E0202 09:00:23.615024 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" containerName="controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.615043 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" containerName="controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: E0202 09:00:23.615061 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a3de2f8-4721-4746-9a12-2047ae2ff972" containerName="route-controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.615069 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a3de2f8-4721-4746-9a12-2047ae2ff972" containerName="route-controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.615185 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a3de2f8-4721-4746-9a12-2047ae2ff972" containerName="route-controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.615203 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" containerName="controller-manager" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.616018 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.618074 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.625195 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8kk9w"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.702250 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sqv7\" (UniqueName: \"kubernetes.io/projected/72a35cfa-73ba-4baf-b7c0-2947ca69a797-kube-api-access-7sqv7\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.702315 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-utilities\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.702338 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-catalog-content\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.803912 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sqv7\" (UniqueName: \"kubernetes.io/projected/72a35cfa-73ba-4baf-b7c0-2947ca69a797-kube-api-access-7sqv7\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.804055 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-utilities\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.804083 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-catalog-content\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.804689 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-catalog-content\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.804971 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72a35cfa-73ba-4baf-b7c0-2947ca69a797-utilities\") pod \"community-operators-8kk9w\" (UID: 
\"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.823661 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sqv7\" (UniqueName: \"kubernetes.io/projected/72a35cfa-73ba-4baf-b7c0-2947ca69a797-kube-api-access-7sqv7\") pod \"community-operators-8kk9w\" (UID: \"72a35cfa-73ba-4baf-b7c0-2947ca69a797\") " pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.932017 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-9c8dfffc8-r4rql"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.932841 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.935206 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.935812 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.935964 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.936036 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.936084 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.937814 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.943847 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.944038 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.944847 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.946230 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.948070 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.948081 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.948300 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.948509 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.948657 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.960153 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv"] Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.960748 4747 generic.go:334] "Generic (PLEG): container finished" podID="a977f70f-0d8f-4480-be1f-0b48d191b054" containerID="94a9ad5836f5858399e3e79fcc37ae7ec83a2b8674ea5ad1d48d1ce9c54b8310" exitCode=0 Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.960826 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bxvq" event={"ID":"a977f70f-0d8f-4480-be1f-0b48d191b054","Type":"ContainerDied","Data":"94a9ad5836f5858399e3e79fcc37ae7ec83a2b8674ea5ad1d48d1ce9c54b8310"} Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.960853 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bxvq" event={"ID":"a977f70f-0d8f-4480-be1f-0b48d191b054","Type":"ContainerStarted","Data":"c7a3c446c392a02948697cc9a266424d32cc8fbec8c7832d315e11a4c48eeb20"} Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.965159 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:23 crc kubenswrapper[4747]: I0202 09:00:23.978380 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9c8dfffc8-r4rql"] Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.107902 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-proxy-ca-bundles\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.107983 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llw8l\" (UniqueName: \"kubernetes.io/projected/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-kube-api-access-llw8l\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-client-ca\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108062 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-config\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108085 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-serving-cert\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108152 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c35894c-4a21-4115-a113-267bd19584c5-serving-cert\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108222 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7b8w\" (UniqueName: \"kubernetes.io/projected/7c35894c-4a21-4115-a113-267bd19584c5-kube-api-access-n7b8w\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108244 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-client-ca\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.108278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-config\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209701 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-config\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209754 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-proxy-ca-bundles\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209781 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llw8l\" (UniqueName: \"kubernetes.io/projected/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-kube-api-access-llw8l\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209815 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-client-ca\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209841 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-config\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-serving-cert\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209886 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7c35894c-4a21-4115-a113-267bd19584c5-serving-cert\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209915 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7b8w\" (UniqueName: \"kubernetes.io/projected/7c35894c-4a21-4115-a113-267bd19584c5-kube-api-access-n7b8w\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.209950 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-client-ca\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.210728 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-client-ca\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.211211 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-config\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.211687 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c35894c-4a21-4115-a113-267bd19584c5-proxy-ca-bundles\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.212579 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-client-ca\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.213111 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-config\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.216158 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-serving-cert\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " 
pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.219070 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c35894c-4a21-4115-a113-267bd19584c5-serving-cert\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.230016 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7b8w\" (UniqueName: \"kubernetes.io/projected/7c35894c-4a21-4115-a113-267bd19584c5-kube-api-access-n7b8w\") pod \"controller-manager-9c8dfffc8-r4rql\" (UID: \"7c35894c-4a21-4115-a113-267bd19584c5\") " pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.231232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llw8l\" (UniqueName: \"kubernetes.io/projected/a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4-kube-api-access-llw8l\") pod \"route-controller-manager-57c8d56458-t5xpv\" (UID: \"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4\") " pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.277414 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.285342 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.349746 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a3de2f8-4721-4746-9a12-2047ae2ff972" path="/var/lib/kubelet/pods/8a3de2f8-4721-4746-9a12-2047ae2ff972/volumes" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.353487 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3e7c5e0-6a5d-401c-9b44-d05468fa98d4" path="/var/lib/kubelet/pods/b3e7c5e0-6a5d-401c-9b44-d05468fa98d4/volumes" Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.354477 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8kk9w"] Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.683126 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv"] Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.771255 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9c8dfffc8-r4rql"] Feb 02 09:00:24 crc kubenswrapper[4747]: W0202 09:00:24.778712 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c35894c_4a21_4115_a113_267bd19584c5.slice/crio-794c6486a1d74251a911c4769aa42a893a6c3d0d74d1d0078caa467164ae80fe WatchSource:0}: Error finding container 794c6486a1d74251a911c4769aa42a893a6c3d0d74d1d0078caa467164ae80fe: Status 404 returned error can't find the container with id 794c6486a1d74251a911c4769aa42a893a6c3d0d74d1d0078caa467164ae80fe Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.974083 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" event={"ID":"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4","Type":"ContainerStarted","Data":"3e813c696909bde96be632c08260b34e0d548e4656717c7e57e152a15efb3a21"} Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.974126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" event={"ID":"a757a8d6-0f3c-428c-9ada-35f3aa3fe3c4","Type":"ContainerStarted","Data":"5846f6d71abce605692ec503d397e257ecfc8a62a057a25aa6636ea0e02ec326"} Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.975977 4747 generic.go:334] "Generic (PLEG): container finished" podID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" containerID="b5ed585325736ead63863fc2933966dc74d951c2eb6428b5e1514abd08dab27e" exitCode=0 Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.976061 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8kk9w" event={"ID":"72a35cfa-73ba-4baf-b7c0-2947ca69a797","Type":"ContainerDied","Data":"b5ed585325736ead63863fc2933966dc74d951c2eb6428b5e1514abd08dab27e"} Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.976098 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8kk9w" event={"ID":"72a35cfa-73ba-4baf-b7c0-2947ca69a797","Type":"ContainerStarted","Data":"c01afc5581f328349eec1cd7f499e18211a07ece0fb031e733e752532a020b98"} Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.978384 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" event={"ID":"7c35894c-4a21-4115-a113-267bd19584c5","Type":"ContainerStarted","Data":"606c42e901bc6b5bb5c883beaed36f7027d60348c41b0d4ab6f824c7d0691cfd"} Feb 02 09:00:24 crc kubenswrapper[4747]: I0202 09:00:24.978438 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" event={"ID":"7c35894c-4a21-4115-a113-267bd19584c5","Type":"ContainerStarted","Data":"794c6486a1d74251a911c4769aa42a893a6c3d0d74d1d0078caa467164ae80fe"} Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.011962 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhvr"] Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.013107 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.014796 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.022195 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhvr"] Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.121541 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48lnv\" (UniqueName: \"kubernetes.io/projected/d487e4d8-ed4a-4adf-b849-70df6155b1e4-kube-api-access-48lnv\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.121972 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-utilities\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.122055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-catalog-content\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.223484 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-catalog-content\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.223562 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48lnv\" (UniqueName: \"kubernetes.io/projected/d487e4d8-ed4a-4adf-b849-70df6155b1e4-kube-api-access-48lnv\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.223608 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-utilities\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.224055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-catalog-content\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.224097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d487e4d8-ed4a-4adf-b849-70df6155b1e4-utilities\") pod \"redhat-marketplace-xhhvr\" (UID: 
\"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.260906 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48lnv\" (UniqueName: \"kubernetes.io/projected/d487e4d8-ed4a-4adf-b849-70df6155b1e4-kube-api-access-48lnv\") pod \"redhat-marketplace-xhhvr\" (UID: \"d487e4d8-ed4a-4adf-b849-70df6155b1e4\") " pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.332359 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.664128 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhhvr"] Feb 02 09:00:25 crc kubenswrapper[4747]: W0202 09:00:25.695792 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd487e4d8_ed4a_4adf_b849_70df6155b1e4.slice/crio-07f338f5b1dc7c6d9efefba4115407e29638b1cda03e9db7874a1ca221370ce0 WatchSource:0}: Error finding container 07f338f5b1dc7c6d9efefba4115407e29638b1cda03e9db7874a1ca221370ce0: Status 404 returned error can't find the container with id 07f338f5b1dc7c6d9efefba4115407e29638b1cda03e9db7874a1ca221370ce0 Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.986020 4747 generic.go:334] "Generic (PLEG): container finished" podID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" containerID="48dd82d73fdc977a32904271bcfd12485f69a20f6832d6c4114c974cc26c1660" exitCode=0 Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.986097 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhvr" event={"ID":"d487e4d8-ed4a-4adf-b849-70df6155b1e4","Type":"ContainerDied","Data":"48dd82d73fdc977a32904271bcfd12485f69a20f6832d6c4114c974cc26c1660"} Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.986130 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhvr" event={"ID":"d487e4d8-ed4a-4adf-b849-70df6155b1e4","Type":"ContainerStarted","Data":"07f338f5b1dc7c6d9efefba4115407e29638b1cda03e9db7874a1ca221370ce0"} Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.989576 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8kk9w" event={"ID":"72a35cfa-73ba-4baf-b7c0-2947ca69a797","Type":"ContainerStarted","Data":"75be1d307e07afe40ebd61a238417ca48577387287f64bb7e65d31ad47e5f4da"} Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.991242 4747 generic.go:334] "Generic (PLEG): container finished" podID="a977f70f-0d8f-4480-be1f-0b48d191b054" containerID="6bf730223af4a96ffe139c9752519265df585c71802113c0e5a38820ee05ab0a" exitCode=0 Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.992669 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bxvq" event={"ID":"a977f70f-0d8f-4480-be1f-0b48d191b054","Type":"ContainerDied","Data":"6bf730223af4a96ffe139c9752519265df585c71802113c0e5a38820ee05ab0a"} Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.992700 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.993562 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:25 crc kubenswrapper[4747]: I0202 09:00:25.998393 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.001656 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.022700 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vd55r"] Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.023804 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.026869 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.035546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vd55r"] Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.061851 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-57c8d56458-t5xpv" podStartSLOduration=4.061827315 podStartE2EDuration="4.061827315s" podCreationTimestamp="2026-02-02 09:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:26.05342218 +0000 UTC m=+238.597760643" watchObservedRunningTime="2026-02-02 09:00:26.061827315 +0000 UTC m=+238.606165748" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.072536 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-9c8dfffc8-r4rql" podStartSLOduration=4.072512738 podStartE2EDuration="4.072512738s" podCreationTimestamp="2026-02-02 09:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:26.068834984 +0000 UTC m=+238.613173417" watchObservedRunningTime="2026-02-02 09:00:26.072512738 +0000 UTC m=+238.616851171" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.137397 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-utilities\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.137731 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwhnd\" (UniqueName: \"kubernetes.io/projected/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-kube-api-access-kwhnd\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.137790 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-catalog-content\") pod \"redhat-operators-vd55r\" (UID: 
\"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.238871 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-utilities\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.238923 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwhnd\" (UniqueName: \"kubernetes.io/projected/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-kube-api-access-kwhnd\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.238991 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-catalog-content\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.239370 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-catalog-content\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.239372 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-utilities\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.261523 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwhnd\" (UniqueName: \"kubernetes.io/projected/0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a-kube-api-access-kwhnd\") pod \"redhat-operators-vd55r\" (UID: \"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a\") " pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:26 crc kubenswrapper[4747]: I0202 09:00:26.338737 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:26.545567 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vd55r"] Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.008308 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhvr" event={"ID":"d487e4d8-ed4a-4adf-b849-70df6155b1e4","Type":"ContainerStarted","Data":"e981aadfa2306449d29f210a2c9ee6217fe3a5079c2b5bce73aac95c3f0756c4"} Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.013879 4747 generic.go:334] "Generic (PLEG): container finished" podID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" containerID="75be1d307e07afe40ebd61a238417ca48577387287f64bb7e65d31ad47e5f4da" exitCode=0 Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.014205 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8kk9w" event={"ID":"72a35cfa-73ba-4baf-b7c0-2947ca69a797","Type":"ContainerDied","Data":"75be1d307e07afe40ebd61a238417ca48577387287f64bb7e65d31ad47e5f4da"} Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.030811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bxvq" event={"ID":"a977f70f-0d8f-4480-be1f-0b48d191b054","Type":"ContainerStarted","Data":"503dbb7143e5aab555085297c4b467909cee0c73af4d204666682bbb8cc159da"} Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.035041 4747 generic.go:334] "Generic (PLEG): container finished" podID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" containerID="76f311cc6beac0c1d8d77d3e3e0e9f97ce9e98445c497d8ee8b839164d43b2e6" exitCode=0 Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.035119 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd55r" event={"ID":"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a","Type":"ContainerDied","Data":"76f311cc6beac0c1d8d77d3e3e0e9f97ce9e98445c497d8ee8b839164d43b2e6"} Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.035424 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd55r" event={"ID":"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a","Type":"ContainerStarted","Data":"26534c05c7b7667a1f407f30f9b3fb0ab68b9b7fe221ec7eff9cdae366e4507b"} Feb 02 09:00:27 crc kubenswrapper[4747]: I0202 09:00:27.075566 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6bxvq" podStartSLOduration=2.535179119 podStartE2EDuration="5.075545019s" podCreationTimestamp="2026-02-02 09:00:22 +0000 UTC" firstStartedPulling="2026-02-02 09:00:23.96263077 +0000 UTC m=+236.506969203" lastFinishedPulling="2026-02-02 09:00:26.50299667 +0000 UTC m=+239.047335103" observedRunningTime="2026-02-02 09:00:27.068737855 +0000 UTC m=+239.613076288" watchObservedRunningTime="2026-02-02 09:00:27.075545019 +0000 UTC m=+239.619883452" Feb 02 09:00:28 crc kubenswrapper[4747]: I0202 09:00:28.048582 4747 generic.go:334] "Generic (PLEG): container finished" podID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" containerID="e981aadfa2306449d29f210a2c9ee6217fe3a5079c2b5bce73aac95c3f0756c4" exitCode=0 Feb 02 09:00:28 crc kubenswrapper[4747]: I0202 09:00:28.048990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhvr" 
event={"ID":"d487e4d8-ed4a-4adf-b849-70df6155b1e4","Type":"ContainerDied","Data":"e981aadfa2306449d29f210a2c9ee6217fe3a5079c2b5bce73aac95c3f0756c4"} Feb 02 09:00:28 crc kubenswrapper[4747]: I0202 09:00:28.055121 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8kk9w" event={"ID":"72a35cfa-73ba-4baf-b7c0-2947ca69a797","Type":"ContainerStarted","Data":"3daf2745970e946835d8962caa4a914aa3afdd2b8f17d3f9ba0b11f9ccad0c3d"} Feb 02 09:00:28 crc kubenswrapper[4747]: I0202 09:00:28.090186 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8kk9w" podStartSLOduration=2.579087446 podStartE2EDuration="5.090162386s" podCreationTimestamp="2026-02-02 09:00:23 +0000 UTC" firstStartedPulling="2026-02-02 09:00:24.977443262 +0000 UTC m=+237.521781695" lastFinishedPulling="2026-02-02 09:00:27.488518202 +0000 UTC m=+240.032856635" observedRunningTime="2026-02-02 09:00:28.087501748 +0000 UTC m=+240.631840181" watchObservedRunningTime="2026-02-02 09:00:28.090162386 +0000 UTC m=+240.634500819" Feb 02 09:00:29 crc kubenswrapper[4747]: I0202 09:00:29.062155 4747 generic.go:334] "Generic (PLEG): container finished" podID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" containerID="656b31ff380f4d6b0d17227002b24a6d1923bf71d66c78ebbe010c0f03961cc4" exitCode=0 Feb 02 09:00:29 crc kubenswrapper[4747]: I0202 09:00:29.062694 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd55r" event={"ID":"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a","Type":"ContainerDied","Data":"656b31ff380f4d6b0d17227002b24a6d1923bf71d66c78ebbe010c0f03961cc4"} Feb 02 09:00:29 crc kubenswrapper[4747]: I0202 09:00:29.066022 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhhvr" event={"ID":"d487e4d8-ed4a-4adf-b849-70df6155b1e4","Type":"ContainerStarted","Data":"5f72da4a8035870157b126738784e609c13c1932fa3c6c7d62faf3fb32757637"} Feb 02 09:00:29 crc kubenswrapper[4747]: I0202 09:00:29.095733 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xhhvr" podStartSLOduration=2.526689375 podStartE2EDuration="5.09571456s" podCreationTimestamp="2026-02-02 09:00:24 +0000 UTC" firstStartedPulling="2026-02-02 09:00:25.987524212 +0000 UTC m=+238.531862645" lastFinishedPulling="2026-02-02 09:00:28.556549397 +0000 UTC m=+241.100887830" observedRunningTime="2026-02-02 09:00:29.094621943 +0000 UTC m=+241.638960386" watchObservedRunningTime="2026-02-02 09:00:29.09571456 +0000 UTC m=+241.640052993" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.996201 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.997711 4747 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.997894 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.998064 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60" gracePeriod=15 Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.998105 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b" gracePeriod=15 Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.998150 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997" gracePeriod=15 Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.998158 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032" gracePeriod=15 Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.998156 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b" gracePeriod=15 Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999141 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999373 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999388 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999403 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999411 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999421 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999427 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999436 4747 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999442 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999451 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999458 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999465 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999472 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999480 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999486 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 09:00:30 crc kubenswrapper[4747]: E0202 09:00:30.999493 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999500 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999605 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999617 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999625 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999635 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999647 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999657 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 09:00:30 crc kubenswrapper[4747]: I0202 09:00:30.999936 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.040917 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.079426 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vd55r" event={"ID":"0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a","Type":"ContainerStarted","Data":"a9a2af823de6097252662bc2511d17cedfec2d5a04fc4a1a4a8d176d065ff858"} Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.080333 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.080785 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.081311 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105571 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105624 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105647 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105677 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105708 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105749 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105769 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.105799 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207170 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207213 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207238 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207283 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207706 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207752 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207832 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207836 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207839 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.207807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:31 crc 
kubenswrapper[4747]: I0202 09:00:31.207873 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: I0202 09:00:31.338444 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:00:31 crc kubenswrapper[4747]: W0202 09:00:31.417486 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-7763208672e611eef98b74c32f237accce6c91a389db07acfdb56e6f454c3413 WatchSource:0}: Error finding container 7763208672e611eef98b74c32f237accce6c91a389db07acfdb56e6f454c3413: Status 404 returned error can't find the container with id 7763208672e611eef98b74c32f237accce6c91a389db07acfdb56e6f454c3413 Feb 02 09:00:31 crc kubenswrapper[4747]: E0202 09:00:31.419643 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189062631f122c78 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,LastTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.086220 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.088632 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.089434 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b" exitCode=0 Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.089465 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032" exitCode=0 Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.089476 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997" 
exitCode=0 Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.089485 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b" exitCode=2 Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.089567 4747 scope.go:117] "RemoveContainer" containerID="f2fc799b7f1897a2abaa3822ee627649ab8b28e261e0d3dd3a0b7867ac4addb6" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.091388 4747 generic.go:334] "Generic (PLEG): container finished" podID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" containerID="9b150e209bf9fdb5dcb9f8fc7e0ac7c65352e87d52251eb429ace8f751634e69" exitCode=0 Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.091436 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf","Type":"ContainerDied","Data":"9b150e209bf9fdb5dcb9f8fc7e0ac7c65352e87d52251eb429ace8f751634e69"} Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.092060 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.092321 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.092316 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7763208672e611eef98b74c32f237accce6c91a389db07acfdb56e6f454c3413"} Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.092604 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.092850 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: E0202 09:00:32.241958 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189062631f122c78 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,LastTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.936025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.937058 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.982903 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.983483 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.983788 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.984078 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:32 crc kubenswrapper[4747]: I0202 09:00:32.984488 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.107453 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.207049 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6bxvq" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.207513 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" 
pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.207954 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.208348 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.208600 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: E0202 09:00:33.416498 4747 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" volumeName="registry-storage" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.453244 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.454117 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.454689 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.455006 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.455427 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.455678 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.455994 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.542572 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.542743 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.542678 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.542962 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.543032 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.543122 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.543543 4747 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.543569 4747 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.543581 4747 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.613624 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.614387 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.614568 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.614850 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.615896 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.616285 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644283 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir\") pod \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644420 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access\") pod \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644438 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" (UID: "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644498 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock\") pod \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\" (UID: \"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf\") " Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644551 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock" (OuterVolumeSpecName: "var-lock") pod "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" (UID: "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644745 4747 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.644762 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.648923 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" (UID: "8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.746275 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.966331 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:33 crc kubenswrapper[4747]: I0202 09:00:33.966703 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.011966 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.012369 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.012831 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.013285 4747 status_manager.go:851] "Failed to get status for pod" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.013602 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.013883 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.014269 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.118660 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.119344 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60" exitCode=0 Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.119404 4747 scope.go:117] "RemoveContainer" containerID="f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.119500 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.124447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf","Type":"ContainerDied","Data":"e725babf3c8c8474916f869ea3999610900024fde76e6bf98238803250e103a4"} Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.124493 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e725babf3c8c8474916f869ea3999610900024fde76e6bf98238803250e103a4" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.124568 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.131102 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e4d54420a38f24e3518d6b881e0c44c90751bbca1bc0191071cb7386b7923877"} Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.132432 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.133459 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.133768 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.134100 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.134390 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.134683 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.137322 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.139976 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection 
refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.140393 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.140601 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.141284 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.141458 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.147884 4747 scope.go:117] "RemoveContainer" containerID="8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.151619 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.151859 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.152160 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.152413 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.152643 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.153615 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.168864 4747 scope.go:117] "RemoveContainer" containerID="afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.180982 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8kk9w" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.181352 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.181565 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.182401 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.182686 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.182894 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.183225 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.184446 4747 scope.go:117] "RemoveContainer" containerID="e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b" Feb 02 
09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.198392 4747 scope.go:117] "RemoveContainer" containerID="ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.214285 4747 scope.go:117] "RemoveContainer" containerID="aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.233371 4747 scope.go:117] "RemoveContainer" containerID="f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.234053 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\": container with ID starting with f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b not found: ID does not exist" containerID="f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.234098 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b"} err="failed to get container status \"f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\": rpc error: code = NotFound desc = could not find container \"f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b\": container with ID starting with f01bdab6cda38b48b768f1b7456e274ed3826a8b61dbc6f7c32311a1d89cdd2b not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.234129 4747 scope.go:117] "RemoveContainer" containerID="8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.234431 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\": container with ID starting with 8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032 not found: ID does not exist" containerID="8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.234487 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032"} err="failed to get container status \"8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\": rpc error: code = NotFound desc = could not find container \"8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032\": container with ID starting with 8a060ed60ed4f43db1fa68a2ea03e26437d5abf6d31d5bbceaab3acfe7cd5032 not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.234518 4747 scope.go:117] "RemoveContainer" containerID="afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.235267 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\": container with ID starting with afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997 not found: ID does not exist" containerID="afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.235297 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997"} err="failed to get container status \"afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\": rpc error: code = NotFound desc = could not find container \"afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997\": container with ID starting with afa426430e71de64ec96c4be6cc2c6d8cdd96d26d678e8729ddc0c6db2da4997 not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.235315 4747 scope.go:117] "RemoveContainer" containerID="e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.235903 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\": container with ID starting with e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b not found: ID does not exist" containerID="e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.235961 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b"} err="failed to get container status \"e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\": rpc error: code = NotFound desc = could not find container \"e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b\": container with ID starting with e717023d39ead9c409450d813c803a6ec2fc8022367ecb8ab66b57c6b43ee07b not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.235996 4747 scope.go:117] "RemoveContainer" containerID="ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.236224 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\": container with ID starting with ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60 not found: ID does not exist" containerID="ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.236250 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60"} err="failed to get container status \"ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\": rpc error: code = NotFound desc = could not find container \"ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60\": container with ID starting with ccc224b8c85d9f9311c6b03d18025990d1f12f51b218246e3aa37c9cb86b7c60 not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.236267 4747 scope.go:117] "RemoveContainer" containerID="aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13" Feb 02 09:00:34 crc kubenswrapper[4747]: E0202 09:00:34.236485 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\": container with ID starting with aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13 not found: ID does 
not exist" containerID="aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.236516 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13"} err="failed to get container status \"aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\": rpc error: code = NotFound desc = could not find container \"aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13\": container with ID starting with aac31d03cd894e994997fc329964830f45cb7c023cdbacc861029f8437c32c13 not found: ID does not exist" Feb 02 09:00:34 crc kubenswrapper[4747]: I0202 09:00:34.346025 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.332575 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.332903 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.377675 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.378701 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.379094 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.379705 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.380085 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.380388 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:35 crc kubenswrapper[4747]: I0202 09:00:35.380776 4747 status_manager.go:851] 
"Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.193763 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xhhvr" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.194415 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.194779 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.195056 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.195342 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.195618 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.195935 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.347390 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.347706 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.384675 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.385152 4747 status_manager.go:851] 
"Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.385344 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.385627 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.386072 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.386308 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:36 crc kubenswrapper[4747]: I0202 09:00:36.386657 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.241995 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vd55r" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.242496 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.242726 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.243020 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.243697 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.244031 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:37 crc kubenswrapper[4747]: I0202 09:00:37.244423 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.342161 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.342925 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.343243 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.343526 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.343758 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.344058 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" 
pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.913100 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.913505 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.913825 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.914168 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.914446 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:38 crc kubenswrapper[4747]: I0202 09:00:38.914481 4747 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 02 09:00:38 crc kubenswrapper[4747]: E0202 09:00:38.914692 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="200ms" Feb 02 09:00:39 crc kubenswrapper[4747]: E0202 09:00:39.116133 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="400ms" Feb 02 09:00:39 crc kubenswrapper[4747]: E0202 09:00:39.517588 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="800ms" Feb 02 09:00:40 crc kubenswrapper[4747]: E0202 09:00:40.318650 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="1.6s" Feb 02 09:00:41 crc kubenswrapper[4747]: E0202 09:00:41.922332 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="3.2s" Feb 
02 09:00:42 crc kubenswrapper[4747]: E0202 09:00:42.243569 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189062631f122c78 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,LastTimestamp:2026-02-02 09:00:31.419174008 +0000 UTC m=+243.963512441,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 09:00:43 crc kubenswrapper[4747]: I0202 09:00:43.745598 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" containerName="oauth-openshift" containerID="cri-o://3705a77e3fc1e727cb84efe5a3fd4b9dcd69dbb199caab6661f009357ec06b1f" gracePeriod=15 Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.207033 4747 generic.go:334] "Generic (PLEG): container finished" podID="2b43f77f-6f91-4311-a016-6fbb58510112" containerID="3705a77e3fc1e727cb84efe5a3fd4b9dcd69dbb199caab6661f009357ec06b1f" exitCode=0 Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.207142 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" event={"ID":"2b43f77f-6f91-4311-a016-6fbb58510112","Type":"ContainerDied","Data":"3705a77e3fc1e727cb84efe5a3fd4b9dcd69dbb199caab6661f009357ec06b1f"} Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.210898 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.210975 4747 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47" exitCode=1 Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.211004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47"} Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.211610 4747 scope.go:117] "RemoveContainer" containerID="1c487c07e1b377c5942fed2aadc65d143c127b1d1feb43d33539649b9cbbcd47" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.211961 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 
38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.212659 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.216893 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.217514 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.218253 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.219841 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.220448 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.310550 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.311915 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.312381 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.312738 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.313031 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.313279 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.313546 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.313927 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.314268 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401307 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401360 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401398 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401422 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401457 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401510 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401697 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401756 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir\") pod 
\"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401849 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401875 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92gdb\" (UniqueName: \"kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401923 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle\") pod \"2b43f77f-6f91-4311-a016-6fbb58510112\" (UID: \"2b43f77f-6f91-4311-a016-6fbb58510112\") " Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.401926 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.402291 4747 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b43f77f-6f91-4311-a016-6fbb58510112-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.402759 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.403411 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.403603 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.403619 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.408926 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb" (OuterVolumeSpecName: "kube-api-access-92gdb") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "kube-api-access-92gdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.410279 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.413141 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.415185 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.415585 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.416173 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.417078 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.417592 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.417900 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2b43f77f-6f91-4311-a016-6fbb58510112" (UID: "2b43f77f-6f91-4311-a016-6fbb58510112"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503801 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503845 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503859 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503870 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503879 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503889 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503898 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503907 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503916 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503925 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503946 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503955 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92gdb\" (UniqueName: 
\"kubernetes.io/projected/2b43f77f-6f91-4311-a016-6fbb58510112-kube-api-access-92gdb\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.503964 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b43f77f-6f91-4311-a016-6fbb58510112-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:00:44 crc kubenswrapper[4747]: I0202 09:00:44.763477 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 09:00:45 crc kubenswrapper[4747]: E0202 09:00:45.123545 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="6.4s" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.219756 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" event={"ID":"2b43f77f-6f91-4311-a016-6fbb58510112","Type":"ContainerDied","Data":"d672d043fb9dcfdb66ac4f1ad1c267ef2e94d307ae34feacecccfa3a207c1d29"} Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.219830 4747 scope.go:117] "RemoveContainer" containerID="3705a77e3fc1e727cb84efe5a3fd4b9dcd69dbb199caab6661f009357ec06b1f" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.221719 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.222797 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.223215 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.223443 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.223632 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.223901 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.224172 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.224446 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.224712 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.227033 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.227084 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a410c31a92faba49c18e192f6c1b612f02aa02d3f397037ceea8c0ceb1f82a59"} Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.228111 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.228288 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.228505 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.228690 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 
09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.228846 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.229112 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.229513 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.229755 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.240847 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.241324 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.241621 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.241968 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.242221 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.242488 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.242799 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.243086 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:45 crc kubenswrapper[4747]: I0202 09:00:45.912446 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.338749 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.340342 4747 status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.341130 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.341654 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.342184 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.342518 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" 
pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.342775 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.343038 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.343365 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.363342 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.363380 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:46 crc kubenswrapper[4747]: E0202 09:00:46.363807 4747 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:46 crc kubenswrapper[4747]: I0202 09:00:46.364318 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:46 crc kubenswrapper[4747]: W0202 09:00:46.403462 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-c73c45801be3b5e32268dc79c23f53c27f6ab5f6fb571e943629db132659b8b8 WatchSource:0}: Error finding container c73c45801be3b5e32268dc79c23f53c27f6ab5f6fb571e943629db132659b8b8: Status 404 returned error can't find the container with id c73c45801be3b5e32268dc79c23f53c27f6ab5f6fb571e943629db132659b8b8 Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.240607 4747 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="9bf7471467086b70cc0581a72fcf8ee0ed91c0fb334b7a86c0316e7ac9b55d7c" exitCode=0 Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.240708 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"9bf7471467086b70cc0581a72fcf8ee0ed91c0fb334b7a86c0316e7ac9b55d7c"} Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.241027 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c73c45801be3b5e32268dc79c23f53c27f6ab5f6fb571e943629db132659b8b8"} Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.241579 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.241603 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.242002 4747 status_manager.go:851] "Failed to get status for pod" podUID="0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a" pod="openshift-marketplace/redhat-operators-vd55r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-vd55r\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: E0202 09:00:47.242113 4747 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.242429 4747 status_manager.go:851] "Failed to get status for pod" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" pod="openshift-authentication/oauth-openshift-558db77b4-dcbr8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dcbr8\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.243217 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.243437 4747 
status_manager.go:851] "Failed to get status for pod" podUID="d487e4d8-ed4a-4adf-b849-70df6155b1e4" pod="openshift-marketplace/redhat-marketplace-xhhvr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xhhvr\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.243776 4747 status_manager.go:851] "Failed to get status for pod" podUID="a977f70f-0d8f-4480-be1f-0b48d191b054" pod="openshift-marketplace/certified-operators-6bxvq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-6bxvq\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.244349 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.244636 4747 status_manager.go:851] "Failed to get status for pod" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:47 crc kubenswrapper[4747]: I0202 09:00:47.244917 4747 status_manager.go:851] "Failed to get status for pod" podUID="72a35cfa-73ba-4baf-b7c0-2947ca69a797" pod="openshift-marketplace/community-operators-8kk9w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8kk9w\": dial tcp 38.102.83.190:6443: connect: connection refused" Feb 02 09:00:48 crc kubenswrapper[4747]: I0202 09:00:48.251345 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"98c6b571d84eb72b5ad8ef6897b43c6fd40f91aa8cfcc5b84d1e7d4ac84b13f4"} Feb 02 09:00:48 crc kubenswrapper[4747]: I0202 09:00:48.251700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3082e53eca979f9bf27a6036eded9e493797d1ac1bd673de36b507d8abbead00"} Feb 02 09:00:48 crc kubenswrapper[4747]: I0202 09:00:48.251716 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8a1875e36f279a1bcc377196d928b85878f4a03440c8626d37d2d6ff380b9149"} Feb 02 09:00:48 crc kubenswrapper[4747]: I0202 09:00:48.251726 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"231120268a67c32902bfd77215745173fc4ca3fcfd5355f786f812ae069a6111"} Feb 02 09:00:49 crc kubenswrapper[4747]: I0202 09:00:49.260789 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b807ef91055141fcb910b79fe28d3cc75149ca0572240989f1514616f0c3b848"} Feb 02 09:00:49 crc kubenswrapper[4747]: I0202 
09:00:49.261550 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:49 crc kubenswrapper[4747]: I0202 09:00:49.261147 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:49 crc kubenswrapper[4747]: I0202 09:00:49.261715 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:50 crc kubenswrapper[4747]: I0202 09:00:50.427618 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 09:00:50 crc kubenswrapper[4747]: I0202 09:00:50.433352 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 09:00:51 crc kubenswrapper[4747]: I0202 09:00:51.365741 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:51 crc kubenswrapper[4747]: I0202 09:00:51.366005 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:51 crc kubenswrapper[4747]: I0202 09:00:51.372706 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:54 crc kubenswrapper[4747]: I0202 09:00:54.271420 4747 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:54 crc kubenswrapper[4747]: I0202 09:00:54.299296 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:54 crc kubenswrapper[4747]: I0202 09:00:54.299326 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:54 crc kubenswrapper[4747]: I0202 09:00:54.305108 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:00:54 crc kubenswrapper[4747]: I0202 09:00:54.308231 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1322123e-7a21-436c-8c41-9a39cce63c50" Feb 02 09:00:55 crc kubenswrapper[4747]: I0202 09:00:55.303633 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:55 crc kubenswrapper[4747]: I0202 09:00:55.303662 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ff9b1fbe-12b3-49da-87e7-85c10ac955fc" Feb 02 09:00:55 crc kubenswrapper[4747]: I0202 09:00:55.917647 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 09:00:58 crc kubenswrapper[4747]: I0202 09:00:58.375555 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1322123e-7a21-436c-8c41-9a39cce63c50" Feb 02 09:01:03 crc kubenswrapper[4747]: I0202 
09:01:03.391899 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 09:01:03 crc kubenswrapper[4747]: I0202 09:01:03.409530 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 09:01:03 crc kubenswrapper[4747]: I0202 09:01:03.416440 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 09:01:03 crc kubenswrapper[4747]: I0202 09:01:03.605610 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 09:01:03 crc kubenswrapper[4747]: I0202 09:01:03.638468 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.254990 4747 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.256267 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=33.256250924 podStartE2EDuration="33.256250924s" podCreationTimestamp="2026-02-02 09:00:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:00:53.897764277 +0000 UTC m=+266.442102710" watchObservedRunningTime="2026-02-02 09:01:04.256250924 +0000 UTC m=+276.800589357" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.257870 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vd55r" podStartSLOduration=35.331488932 podStartE2EDuration="38.257861014s" podCreationTimestamp="2026-02-02 09:00:26 +0000 UTC" firstStartedPulling="2026-02-02 09:00:27.036706125 +0000 UTC m=+239.581044568" lastFinishedPulling="2026-02-02 09:00:29.963078217 +0000 UTC m=+242.507416650" observedRunningTime="2026-02-02 09:00:53.952662315 +0000 UTC m=+266.497000748" watchObservedRunningTime="2026-02-02 09:01:04.257861014 +0000 UTC m=+276.802199447" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.259369 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dcbr8","openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.259420 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.263253 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.275363 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=10.27534569 podStartE2EDuration="10.27534569s" podCreationTimestamp="2026-02-02 09:00:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:01:04.274565001 +0000 UTC m=+276.818903434" watchObservedRunningTime="2026-02-02 09:01:04.27534569 +0000 UTC m=+276.819684123" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.347915 4747 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" path="/var/lib/kubelet/pods/2b43f77f-6f91-4311-a016-6fbb58510112/volumes" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.566099 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.792299 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 09:01:04 crc kubenswrapper[4747]: I0202 09:01:04.996548 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.041474 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.050086 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.200347 4747 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.200803 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://e4d54420a38f24e3518d6b881e0c44c90751bbca1bc0191071cb7386b7923877" gracePeriod=5 Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.215438 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.260213 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.303343 4747 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.388284 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.397441 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.489176 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.502832 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.506629 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 09:01:05 crc kubenswrapper[4747]: I0202 09:01:05.869137 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.408363 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 09:01:06 crc 
kubenswrapper[4747]: I0202 09:01:06.427543 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.780211 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.847278 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.858816 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.905311 4747 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 09:01:06 crc kubenswrapper[4747]: I0202 09:01:06.989810 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.130886 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.137764 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.259451 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.300276 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.477307 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 09:01:07 crc kubenswrapper[4747]: I0202 09:01:07.885664 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.006724 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.268688 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.273280 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.293444 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.341336 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.360876 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.486878 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.585912 4747 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.596997 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.700916 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 09:01:08 crc kubenswrapper[4747]: I0202 09:01:08.995268 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.010982 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.059339 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.120119 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.209119 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.228476 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.289796 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.351733 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.386036 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.408629 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.440759 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.448675 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.491427 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.538910 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.585253 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.586266 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.591264 4747 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.597119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.625116 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.775366 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.797458 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.877456 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.879326 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.880550 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.918515 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.959528 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 09:01:09 crc kubenswrapper[4747]: I0202 09:01:09.980426 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.039078 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.044586 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.077748 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.128307 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.157686 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.177278 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.178620 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.223171 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.251704 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.277283 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.301261 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.302026 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.303310 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.385982 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.391198 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.391240 4747 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="e4d54420a38f24e3518d6b881e0c44c90751bbca1bc0191071cb7386b7923877" exitCode=137 Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.453586 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.496284 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.581844 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.619293 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.624694 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.634535 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.771092 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.778264 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.778369 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.789639 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.801426 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813257 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813621 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813693 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.814013 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813811 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813834 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.813845 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.814078 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.814437 4747 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.815016 4747 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.815122 4747 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.815181 4747 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.822945 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.838628 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.917416 4747 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 09:01:10 crc kubenswrapper[4747]: I0202 09:01:10.933874 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.102887 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.370354 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.395618 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.396758 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.396827 4747 scope.go:117] "RemoveContainer" containerID="e4d54420a38f24e3518d6b881e0c44c90751bbca1bc0191071cb7386b7923877" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.396878 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.405892 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.439759 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.442192 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.463373 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.473824 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.534078 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.653547 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.684007 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.690748 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.738817 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.784000 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.845664 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.849750 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.878439 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.887938 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.903413 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 09:01:11 crc kubenswrapper[4747]: I0202 09:01:11.937990 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.048196 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 09:01:12 crc 
kubenswrapper[4747]: I0202 09:01:12.110406 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.143573 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.154050 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.298065 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.310630 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.346109 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.346686 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.355492 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.355536 4747 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="4652bbb8-637b-4ebd-b904-956c2f3684cc" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.359006 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.359163 4747 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="4652bbb8-637b-4ebd-b904-956c2f3684cc" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.394511 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.488991 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.536375 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.560764 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.585818 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.587772 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.678834 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 
09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.719109 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.771698 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.783616 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 09:01:12 crc kubenswrapper[4747]: I0202 09:01:12.935095 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.139333 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.140372 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.399441 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.434934 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.449359 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.449477 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.492772 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.542908 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.605455 4747 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.631848 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.664811 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.702781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.720109 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.744653 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.762848 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 
09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.859795 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.885019 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.891991 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.897899 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 09:01:13 crc kubenswrapper[4747]: I0202 09:01:13.997714 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.132188 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.235878 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.327889 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.337203 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.402486 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.437386 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479005 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dp9nf"] Feb 02 09:01:14 crc kubenswrapper[4747]: E0202 09:01:14.479194 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" containerName="oauth-openshift" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479206 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" containerName="oauth-openshift" Feb 02 09:01:14 crc kubenswrapper[4747]: E0202 09:01:14.479214 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" containerName="installer" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479220 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" containerName="installer" Feb 02 09:01:14 crc kubenswrapper[4747]: E0202 09:01:14.479232 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479238 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479314 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
containerName="startup-monitor" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479329 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b43f77f-6f91-4311-a016-6fbb58510112" containerName="oauth-openshift" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479336 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aa9c57b-f5cb-4a99-be93-8554b4ba2cbf" containerName="installer" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.479758 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.485237 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.485242 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.485305 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.485474 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.485909 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.486231 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.486496 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.486734 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.486880 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.489219 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.489563 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.489737 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.494479 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.496192 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.500475 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.559604 
4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664094 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664160 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbnr4\" (UniqueName: \"kubernetes.io/projected/9770fc89-c452-4211-9f67-e2cc516b8756-kube-api-access-dbnr4\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664181 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664200 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664237 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9770fc89-c452-4211-9f67-e2cc516b8756-audit-dir\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664255 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664270 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664371 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664391 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664416 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-audit-policies\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664480 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664505 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.664547 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765605 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765643 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbnr4\" (UniqueName: \"kubernetes.io/projected/9770fc89-c452-4211-9f67-e2cc516b8756-kube-api-access-dbnr4\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765664 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765688 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765710 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9770fc89-c452-4211-9f67-e2cc516b8756-audit-dir\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765731 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765751 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765773 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765853 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765882 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765912 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-audit-policies\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765958 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.765983 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.766182 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9770fc89-c452-4211-9f67-e2cc516b8756-audit-dir\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.766558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.767171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.767192 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-audit-policies\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.767172 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.772598 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.772699 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.773028 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.773562 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.773613 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.774252 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.774369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.776331 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9770fc89-c452-4211-9f67-e2cc516b8756-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.784890 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbnr4\" (UniqueName: \"kubernetes.io/projected/9770fc89-c452-4211-9f67-e2cc516b8756-kube-api-access-dbnr4\") pod \"oauth-openshift-58d4f98775-dp9nf\" (UID: \"9770fc89-c452-4211-9f67-e2cc516b8756\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.797655 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:14 crc kubenswrapper[4747]: I0202 09:01:14.938178 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.052851 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.070177 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.171993 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.263216 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.264731 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.272245 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.349298 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.485544 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.492527 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.518129 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.602077 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 09:01:15 crc 
kubenswrapper[4747]: I0202 09:01:15.608144 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.626143 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.667344 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.692161 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.708678 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.733757 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 09:01:15 crc kubenswrapper[4747]: I0202 09:01:15.984119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.156489 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.199193 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.212064 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.236834 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.265565 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.271183 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.329821 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.335781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.346598 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.503736 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.579743 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.593055 4747 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.633927 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.662080 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.713353 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.824258 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 09:01:16 crc kubenswrapper[4747]: I0202 09:01:16.989757 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.002295 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.210285 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.358153 4747 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.609495 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.609643 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.612225 4747 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.634009 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.657366 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.792185 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.914213 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.924825 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.970874 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 09:01:17 crc kubenswrapper[4747]: I0202 09:01:17.983180 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 
09:01:18.048641 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.049707 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.060458 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.091721 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.164604 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.164698 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.179320 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.200910 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.313893 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.449961 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.586234 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.646015 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.747147 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.813558 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.821174 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 09:01:18 crc kubenswrapper[4747]: I0202 09:01:18.869656 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 09:01:19 crc kubenswrapper[4747]: I0202 09:01:19.303796 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 09:01:19 crc kubenswrapper[4747]: I0202 09:01:19.390285 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 09:01:19 crc kubenswrapper[4747]: I0202 09:01:19.618291 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 09:01:20 crc kubenswrapper[4747]: 
I0202 09:01:20.224961 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 09:01:20 crc kubenswrapper[4747]: I0202 09:01:20.742544 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 09:01:28 crc kubenswrapper[4747]: I0202 09:01:28.090826 4747 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 02 09:01:29 crc kubenswrapper[4747]: I0202 09:01:29.456798 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 09:01:38 crc kubenswrapper[4747]: I0202 09:01:38.946464 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dp9nf"] Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.129638 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dp9nf"] Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.619770 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" event={"ID":"9770fc89-c452-4211-9f67-e2cc516b8756","Type":"ContainerStarted","Data":"916b0f47e8f8fda10110f77c1342f817adb8cadd3a48eda44b4d6ba4b5b32505"} Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.620177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" event={"ID":"9770fc89-c452-4211-9f67-e2cc516b8756","Type":"ContainerStarted","Data":"54ecfcb07e4353c3d758c3c84b3d51a86788bf3e2607e85a9b3c26c827f1b082"} Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.620619 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.640647 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" podStartSLOduration=81.64062833 podStartE2EDuration="1m21.64062833s" podCreationTimestamp="2026-02-02 09:00:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:01:39.639961544 +0000 UTC m=+312.184299977" watchObservedRunningTime="2026-02-02 09:01:39.64062833 +0000 UTC m=+312.184966763" Feb 02 09:01:39 crc kubenswrapper[4747]: I0202 09:01:39.696830 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 09:01:40 crc kubenswrapper[4747]: I0202 09:01:40.087671 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-58d4f98775-dp9nf" Feb 02 09:01:45 crc kubenswrapper[4747]: I0202 09:01:45.997403 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 09:01:48 crc kubenswrapper[4747]: I0202 09:01:48.196782 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 09:01:48 crc kubenswrapper[4747]: I0202 09:01:48.459675 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 09:01:49 crc kubenswrapper[4747]: I0202 09:01:49.185209 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"config" Feb 02 09:01:50 crc kubenswrapper[4747]: I0202 09:01:50.659036 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 09:02:01 crc kubenswrapper[4747]: I0202 09:02:01.259655 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 09:02:20 crc kubenswrapper[4747]: I0202 09:02:20.518389 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:02:20 crc kubenswrapper[4747]: I0202 09:02:20.520143 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:02:50 crc kubenswrapper[4747]: I0202 09:02:50.519145 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:02:50 crc kubenswrapper[4747]: I0202 09:02:50.519907 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.470074 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tmhqk"] Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.471182 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.521233 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tmhqk"] Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.666953 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvr8h\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-kube-api-access-kvr8h\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-registry-tls\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9477c75-26a3-41a2-84da-c823bc45da26-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-registry-certificates\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9477c75-26a3-41a2-84da-c823bc45da26-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667419 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-trusted-ca\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667507 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-bound-sa-token\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.667538 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.706147 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769172 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9477c75-26a3-41a2-84da-c823bc45da26-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769231 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-trusted-ca\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769287 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-bound-sa-token\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769315 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvr8h\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-kube-api-access-kvr8h\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769332 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-registry-tls\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769351 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9477c75-26a3-41a2-84da-c823bc45da26-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.769369 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-registry-certificates\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.770374 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b9477c75-26a3-41a2-84da-c823bc45da26-ca-trust-extracted\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.771060 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-registry-certificates\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.772483 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9477c75-26a3-41a2-84da-c823bc45da26-trusted-ca\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.775650 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b9477c75-26a3-41a2-84da-c823bc45da26-installation-pull-secrets\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.778758 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-registry-tls\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.785860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-bound-sa-token\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.787756 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvr8h\" (UniqueName: \"kubernetes.io/projected/b9477c75-26a3-41a2-84da-c823bc45da26-kube-api-access-kvr8h\") pod \"image-registry-66df7c8f76-tmhqk\" (UID: \"b9477c75-26a3-41a2-84da-c823bc45da26\") " pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:52 crc kubenswrapper[4747]: I0202 09:02:52.792304 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:53 crc kubenswrapper[4747]: I0202 09:02:53.008130 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-tmhqk"] Feb 02 09:02:53 crc kubenswrapper[4747]: I0202 09:02:53.977025 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" event={"ID":"b9477c75-26a3-41a2-84da-c823bc45da26","Type":"ContainerStarted","Data":"f183368c03100912e2a048926f44c9fbf559073028f4e2d2f450c48df376b44a"} Feb 02 09:02:53 crc kubenswrapper[4747]: I0202 09:02:53.977377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" event={"ID":"b9477c75-26a3-41a2-84da-c823bc45da26","Type":"ContainerStarted","Data":"8096b9e90acdad54f277fe6b7ae49b40563323fb5e585150ce4870042376cb19"} Feb 02 09:02:53 crc kubenswrapper[4747]: I0202 09:02:53.977403 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:02:54 crc kubenswrapper[4747]: I0202 09:02:54.006500 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" podStartSLOduration=2.006483273 podStartE2EDuration="2.006483273s" podCreationTimestamp="2026-02-02 09:02:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:02:54.003450738 +0000 UTC m=+386.547789191" watchObservedRunningTime="2026-02-02 09:02:54.006483273 +0000 UTC m=+386.550821706" Feb 02 09:03:12 crc kubenswrapper[4747]: I0202 09:03:12.798365 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-tmhqk" Feb 02 09:03:12 crc kubenswrapper[4747]: I0202 09:03:12.857939 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 09:03:20 crc kubenswrapper[4747]: I0202 09:03:20.518315 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:03:20 crc kubenswrapper[4747]: I0202 09:03:20.518787 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:03:20 crc kubenswrapper[4747]: I0202 09:03:20.518827 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:03:20 crc kubenswrapper[4747]: I0202 09:03:20.519353 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:03:20 crc kubenswrapper[4747]: I0202 09:03:20.519415 4747 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605" gracePeriod=600 Feb 02 09:03:21 crc kubenswrapper[4747]: I0202 09:03:21.134987 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605" exitCode=0 Feb 02 09:03:21 crc kubenswrapper[4747]: I0202 09:03:21.135024 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605"} Feb 02 09:03:21 crc kubenswrapper[4747]: I0202 09:03:21.135081 4747 scope.go:117] "RemoveContainer" containerID="b52074b9589776612f98db84ab45f25ec1ad6cd13040073a74dcb713fd5adc6c" Feb 02 09:03:22 crc kubenswrapper[4747]: I0202 09:03:22.142886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b"} Feb 02 09:03:37 crc kubenswrapper[4747]: I0202 09:03:37.901835 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" podUID="943f883b-9752-44f0-b3ba-845f53d4b86e" containerName="registry" containerID="cri-o://56e6a5bc39fc32e9958b5ffa7f60bc829c13b6f09dc244260a8ed9d10ec3b0a3" gracePeriod=30 Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.235616 4747 generic.go:334] "Generic (PLEG): container finished" podID="943f883b-9752-44f0-b3ba-845f53d4b86e" containerID="56e6a5bc39fc32e9958b5ffa7f60bc829c13b6f09dc244260a8ed9d10ec3b0a3" exitCode=0 Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.235723 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" event={"ID":"943f883b-9752-44f0-b3ba-845f53d4b86e","Type":"ContainerDied","Data":"56e6a5bc39fc32e9958b5ffa7f60bc829c13b6f09dc244260a8ed9d10ec3b0a3"} Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.235973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" event={"ID":"943f883b-9752-44f0-b3ba-845f53d4b86e","Type":"ContainerDied","Data":"86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868"} Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.235997 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86203a089a7a9caf15176b8ea771ce5efe362462f9e1f6bc75933dce7b13a868" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.245924 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.342752 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.342908 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.342979 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.343020 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdfkx\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.343063 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.343091 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.343155 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.343254 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets\") pod \"943f883b-9752-44f0-b3ba-845f53d4b86e\" (UID: \"943f883b-9752-44f0-b3ba-845f53d4b86e\") " Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.344220 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.344250 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.349865 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.352094 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx" (OuterVolumeSpecName: "kube-api-access-xdfkx") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "kube-api-access-xdfkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.358503 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.358986 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.359411 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.362476 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "943f883b-9752-44f0-b3ba-845f53d4b86e" (UID: "943f883b-9752-44f0-b3ba-845f53d4b86e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445309 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/943f883b-9752-44f0-b3ba-845f53d4b86e-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445369 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445383 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445396 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdfkx\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-kube-api-access-xdfkx\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445408 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445418 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/943f883b-9752-44f0-b3ba-845f53d4b86e-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:38 crc kubenswrapper[4747]: I0202 09:03:38.445453 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/943f883b-9752-44f0-b3ba-845f53d4b86e-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 09:03:39 crc kubenswrapper[4747]: I0202 09:03:39.240731 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-25ddd" Feb 02 09:03:39 crc kubenswrapper[4747]: I0202 09:03:39.282573 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 09:03:39 crc kubenswrapper[4747]: I0202 09:03:39.286563 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-25ddd"] Feb 02 09:03:40 crc kubenswrapper[4747]: I0202 09:03:40.347191 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="943f883b-9752-44f0-b3ba-845f53d4b86e" path="/var/lib/kubelet/pods/943f883b-9752-44f0-b3ba-845f53d4b86e/volumes" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.325377 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-64kvl"] Feb 02 09:05:21 crc kubenswrapper[4747]: E0202 09:05:21.326275 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="943f883b-9752-44f0-b3ba-845f53d4b86e" containerName="registry" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.326292 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="943f883b-9752-44f0-b3ba-845f53d4b86e" containerName="registry" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.326441 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="943f883b-9752-44f0-b3ba-845f53d4b86e" containerName="registry" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.327001 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.330209 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-ftfxs" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.330463 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.330503 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.336977 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-lgx2r"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.337713 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-lgx2r" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.342665 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-bpl78" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.347574 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-lgx2r"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.352135 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-64kvl"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.353412 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-lrtlk"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.354228 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.357977 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-gncnf" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.362559 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-lrtlk"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.449845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg46z\" (UniqueName: \"kubernetes.io/projected/71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7-kube-api-access-pg46z\") pod \"cert-manager-cainjector-cf98fcc89-64kvl\" (UID: \"71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.450246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p56zq\" (UniqueName: \"kubernetes.io/projected/918034e7-cc57-4704-9891-5c2405668e2e-kube-api-access-p56zq\") pod \"cert-manager-webhook-687f57d79b-lrtlk\" (UID: \"918034e7-cc57-4704-9891-5c2405668e2e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.450294 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvp5c\" (UniqueName: \"kubernetes.io/projected/7a38108b-02cd-4489-88df-5b532a16b031-kube-api-access-pvp5c\") pod \"cert-manager-858654f9db-lgx2r\" (UID: \"7a38108b-02cd-4489-88df-5b532a16b031\") " pod="cert-manager/cert-manager-858654f9db-lgx2r" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.551750 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p56zq\" (UniqueName: \"kubernetes.io/projected/918034e7-cc57-4704-9891-5c2405668e2e-kube-api-access-p56zq\") pod \"cert-manager-webhook-687f57d79b-lrtlk\" (UID: \"918034e7-cc57-4704-9891-5c2405668e2e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.551795 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvp5c\" (UniqueName: \"kubernetes.io/projected/7a38108b-02cd-4489-88df-5b532a16b031-kube-api-access-pvp5c\") pod \"cert-manager-858654f9db-lgx2r\" (UID: \"7a38108b-02cd-4489-88df-5b532a16b031\") " pod="cert-manager/cert-manager-858654f9db-lgx2r" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.551834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg46z\" (UniqueName: \"kubernetes.io/projected/71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7-kube-api-access-pg46z\") pod \"cert-manager-cainjector-cf98fcc89-64kvl\" (UID: \"71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.568897 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p56zq\" (UniqueName: \"kubernetes.io/projected/918034e7-cc57-4704-9891-5c2405668e2e-kube-api-access-p56zq\") pod \"cert-manager-webhook-687f57d79b-lrtlk\" (UID: \"918034e7-cc57-4704-9891-5c2405668e2e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.572306 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pvp5c\" (UniqueName: \"kubernetes.io/projected/7a38108b-02cd-4489-88df-5b532a16b031-kube-api-access-pvp5c\") pod \"cert-manager-858654f9db-lgx2r\" (UID: \"7a38108b-02cd-4489-88df-5b532a16b031\") " pod="cert-manager/cert-manager-858654f9db-lgx2r" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.576752 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg46z\" (UniqueName: \"kubernetes.io/projected/71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7-kube-api-access-pg46z\") pod \"cert-manager-cainjector-cf98fcc89-64kvl\" (UID: \"71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.641850 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.658779 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-lgx2r" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.673160 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.856194 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-lgx2r"] Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.869893 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:05:21 crc kubenswrapper[4747]: I0202 09:05:21.893732 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-64kvl"] Feb 02 09:05:21 crc kubenswrapper[4747]: W0202 09:05:21.895102 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71ba9c61_d5e8_4688_8b1b_3eaaa5f2d6a7.slice/crio-f7c07c927739dde78fb2729894d7a80fc486ce40ed60abc1c46533ca959ee7e6 WatchSource:0}: Error finding container f7c07c927739dde78fb2729894d7a80fc486ce40ed60abc1c46533ca959ee7e6: Status 404 returned error can't find the container with id f7c07c927739dde78fb2729894d7a80fc486ce40ed60abc1c46533ca959ee7e6 Feb 02 09:05:22 crc kubenswrapper[4747]: I0202 09:05:22.119684 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-lgx2r" event={"ID":"7a38108b-02cd-4489-88df-5b532a16b031","Type":"ContainerStarted","Data":"a5177009ea242dcbfb9560d191ec633456d881439f4ca6384412967ece9c2dc1"} Feb 02 09:05:22 crc kubenswrapper[4747]: I0202 09:05:22.122303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" event={"ID":"71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7","Type":"ContainerStarted","Data":"f7c07c927739dde78fb2729894d7a80fc486ce40ed60abc1c46533ca959ee7e6"} Feb 02 09:05:22 crc kubenswrapper[4747]: I0202 09:05:22.141515 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-lrtlk"] Feb 02 09:05:22 crc kubenswrapper[4747]: W0202 09:05:22.146171 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod918034e7_cc57_4704_9891_5c2405668e2e.slice/crio-642be1976c364d676813b770c24a3456cefc56f1db2b4ad5e751e81088fa1ae6 WatchSource:0}: Error finding container 
642be1976c364d676813b770c24a3456cefc56f1db2b4ad5e751e81088fa1ae6: Status 404 returned error can't find the container with id 642be1976c364d676813b770c24a3456cefc56f1db2b4ad5e751e81088fa1ae6 Feb 02 09:05:23 crc kubenswrapper[4747]: I0202 09:05:23.131954 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" event={"ID":"918034e7-cc57-4704-9891-5c2405668e2e","Type":"ContainerStarted","Data":"642be1976c364d676813b770c24a3456cefc56f1db2b4ad5e751e81088fa1ae6"} Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.153598 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" event={"ID":"71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7","Type":"ContainerStarted","Data":"73981c16905128c5627a98e170d9ee97a7024cf2317ca10481f2110de723def6"} Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.155769 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" event={"ID":"918034e7-cc57-4704-9891-5c2405668e2e","Type":"ContainerStarted","Data":"e282919c9167a225055d5eef54056b8ebfd92f58aadfe9aaedbd5b107b38c065"} Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.155911 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.157047 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-lgx2r" event={"ID":"7a38108b-02cd-4489-88df-5b532a16b031","Type":"ContainerStarted","Data":"88e0ad9e7696f13a409962b5499a2f84052a25ebad010c3923bc578174dda830"} Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.214007 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-64kvl" podStartSLOduration=1.259722951 podStartE2EDuration="5.213988115s" podCreationTimestamp="2026-02-02 09:05:21 +0000 UTC" firstStartedPulling="2026-02-02 09:05:21.89778128 +0000 UTC m=+534.442119713" lastFinishedPulling="2026-02-02 09:05:25.852046444 +0000 UTC m=+538.396384877" observedRunningTime="2026-02-02 09:05:26.187329256 +0000 UTC m=+538.731667729" watchObservedRunningTime="2026-02-02 09:05:26.213988115 +0000 UTC m=+538.758326568" Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.242420 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" podStartSLOduration=1.550958066 podStartE2EDuration="5.242400067s" podCreationTimestamp="2026-02-02 09:05:21 +0000 UTC" firstStartedPulling="2026-02-02 09:05:22.149069288 +0000 UTC m=+534.693407731" lastFinishedPulling="2026-02-02 09:05:25.840511309 +0000 UTC m=+538.384849732" observedRunningTime="2026-02-02 09:05:26.219247125 +0000 UTC m=+538.763585578" watchObservedRunningTime="2026-02-02 09:05:26.242400067 +0000 UTC m=+538.786738520" Feb 02 09:05:26 crc kubenswrapper[4747]: I0202 09:05:26.244465 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-lgx2r" podStartSLOduration=1.265958975 podStartE2EDuration="5.244456077s" podCreationTimestamp="2026-02-02 09:05:21 +0000 UTC" firstStartedPulling="2026-02-02 09:05:21.869689406 +0000 UTC m=+534.414027829" lastFinishedPulling="2026-02-02 09:05:25.848186488 +0000 UTC m=+538.392524931" observedRunningTime="2026-02-02 09:05:26.240795477 +0000 UTC m=+538.785133920" watchObservedRunningTime="2026-02-02 09:05:26.244456077 +0000 
UTC m=+538.788794520" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.203818 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7782"] Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.204752 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-controller" containerID="cri-o://998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205106 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="sbdb" containerID="cri-o://8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205140 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="nbdb" containerID="cri-o://18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205173 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="northd" containerID="cri-o://d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205211 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205261 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-node" containerID="cri-o://df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.205294 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-acl-logging" containerID="cri-o://70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.250073 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" containerID="cri-o://d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" gracePeriod=30 Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.537729 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/3.log" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.541358 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovn-acl-logging/0.log" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.542205 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovn-controller/0.log" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.542919 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586167 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586207 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586227 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586273 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc4tt\" (UniqueName: \"kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586302 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586316 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586329 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586366 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586383 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586359 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash" (OuterVolumeSpecName: "host-slash") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586399 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586539 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586648 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586716 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586777 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586826 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586870 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586921 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586999 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587057 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587124 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log\") pod \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\" (UID: \"b3dfe801-a30e-4352-bec1-869e46ad5f0a\") " Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586428 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586444 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587635 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.586573 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587236 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587271 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587655 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587667 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket" (OuterVolumeSpecName: "log-socket") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587309 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587682 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587602 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587608 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587699 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587715 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587729 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log" (OuterVolumeSpecName: "node-log") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587794 4747 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-slash\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.587962 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.593305 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt" (OuterVolumeSpecName: "kube-api-access-kc4tt") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "kube-api-access-kc4tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.593810 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.603746 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b3dfe801-a30e-4352-bec1-869e46ad5f0a" (UID: "b3dfe801-a30e-4352-bec1-869e46ad5f0a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.608827 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-n2xc9"] Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.609212 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-node" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.609315 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-node" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.609389 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="nbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.609476 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="nbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.609568 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.609646 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.609719 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.609786 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.609861 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="sbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.609949 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="sbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610032 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="northd" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610102 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="northd" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610187 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610257 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610349 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610414 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610485 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" 
containerName="ovn-acl-logging" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610550 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-acl-logging" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610661 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610729 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610798 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.610867 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.610972 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kubecfg-setup" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611049 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kubecfg-setup" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611264 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-node" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611354 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611440 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="northd" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611510 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611586 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611672 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611747 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="sbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611820 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.611892 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovn-acl-logging" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.612007 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="nbdb" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.612088 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: E0202 09:05:31.612269 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.612341 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.612548 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerName="ovnkube-controller" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.614440 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.675845 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-lrtlk" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.688912 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-log-socket\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.689293 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-var-lib-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.689492 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-kubelet\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.689651 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-netd\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.689797 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-ovn\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.689975 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdk9z\" (UniqueName: \"kubernetes.io/projected/6e309b88-c22c-434e-a7a4-86af050e55f8-kube-api-access-tdk9z\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc 
kubenswrapper[4747]: I0202 09:05:31.690146 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-slash\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.690273 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-bin\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.690385 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-env-overrides\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.690600 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.690770 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-config\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.690898 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-netns\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691038 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-systemd-units\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691147 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691255 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-openvswitch\") pod 
\"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691381 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-systemd\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691504 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e309b88-c22c-434e-a7a4-86af050e55f8-ovn-node-metrics-cert\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691629 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-node-log\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691797 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-script-lib\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.691962 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-etc-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692139 4747 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692246 4747 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-log-socket\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692321 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692400 4747 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692479 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: 
I0202 09:05:31.692552 4747 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-node-log\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692785 4747 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692870 4747 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.692971 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc4tt\" (UniqueName: \"kubernetes.io/projected/b3dfe801-a30e-4352-bec1-869e46ad5f0a-kube-api-access-kc4tt\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693072 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693158 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693238 4747 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693329 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693406 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3dfe801-a30e-4352-bec1-869e46ad5f0a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693506 4747 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693586 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693663 4747 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693743 4747 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b3dfe801-a30e-4352-bec1-869e46ad5f0a-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.693823 4747 
reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3dfe801-a30e-4352-bec1-869e46ad5f0a-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-var-lib-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794612 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-kubelet\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794637 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-netd\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794653 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-ovn\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdk9z\" (UniqueName: \"kubernetes.io/projected/6e309b88-c22c-434e-a7a4-86af050e55f8-kube-api-access-tdk9z\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794687 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-slash\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794705 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-bin\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794723 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-env-overrides\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794756 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794778 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-config\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794799 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-netns\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794821 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-systemd-units\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794841 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794858 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794877 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-systemd\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794892 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e309b88-c22c-434e-a7a4-86af050e55f8-ovn-node-metrics-cert\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794910 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-node-log\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794962 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-script-lib\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794980 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-etc-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.794997 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-log-socket\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795074 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-log-socket\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795111 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-var-lib-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795132 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-systemd-units\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-run-netns\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-systemd\") pod \"ovnkube-node-n2xc9\" (UID: 
\"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795210 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-bin\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795235 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795255 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-slash\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795296 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-kubelet\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795276 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-host-cni-netd\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795320 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-node-log\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-run-ovn\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795388 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e309b88-c22c-434e-a7a4-86af050e55f8-etc-openvswitch\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.795972 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-config\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.796041 
4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-ovnkube-script-lib\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.796081 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e309b88-c22c-434e-a7a4-86af050e55f8-env-overrides\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.798722 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e309b88-c22c-434e-a7a4-86af050e55f8-ovn-node-metrics-cert\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.810539 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdk9z\" (UniqueName: \"kubernetes.io/projected/6e309b88-c22c-434e-a7a4-86af050e55f8-kube-api-access-tdk9z\") pod \"ovnkube-node-n2xc9\" (UID: \"6e309b88-c22c-434e-a7a4-86af050e55f8\") " pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: I0202 09:05:31.931711 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:31 crc kubenswrapper[4747]: W0202 09:05:31.953501 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e309b88_c22c_434e_a7a4_86af050e55f8.slice/crio-c86463dc24479737f78c5eb2f3641c95874045605943cc2b4ce7c4a3216726e5 WatchSource:0}: Error finding container c86463dc24479737f78c5eb2f3641c95874045605943cc2b4ce7c4a3216726e5: Status 404 returned error can't find the container with id c86463dc24479737f78c5eb2f3641c95874045605943cc2b4ce7c4a3216726e5 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.190886 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e309b88-c22c-434e-a7a4-86af050e55f8" containerID="dafab8f395ead8aac1559d4aea1c8db1de3d4712816c57b80ed6f16be09942a3" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.190961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerDied","Data":"dafab8f395ead8aac1559d4aea1c8db1de3d4712816c57b80ed6f16be09942a3"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.191024 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"c86463dc24479737f78c5eb2f3641c95874045605943cc2b4ce7c4a3216726e5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.193417 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/2.log" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.194001 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/1.log" Feb 02 09:05:32 crc 
kubenswrapper[4747]: I0202 09:05:32.194058 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" containerID="2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d" exitCode=2 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.194117 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerDied","Data":"2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.194182 4747 scope.go:117] "RemoveContainer" containerID="2b72b6cd4cf73f73a6df5f643b99cbf1abef90baf944b2352d157405d8422e60" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.194881 4747 scope.go:117] "RemoveContainer" containerID="2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.195220 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pvnm7_openshift-multus(b1945e48-8aba-4a55-8dce-18e4a87ce4c5)\"" pod="openshift-multus/multus-pvnm7" podUID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.197835 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovnkube-controller/3.log" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.200534 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovn-acl-logging/0.log" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201192 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7782_b3dfe801-a30e-4352-bec1-869e46ad5f0a/ovn-controller/0.log" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201702 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201742 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201753 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201747 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201823 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201762 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201835 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201847 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201861 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" exitCode=0 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201870 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" exitCode=143 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201877 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" exitCode=143 Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201896 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201905 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201915 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201926 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201931 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201951 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201956 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201962 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201967 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201981 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201987 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.201992 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202000 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202009 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202020 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202027 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202033 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202038 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202043 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202048 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202053 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202058 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202063 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202070 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202079 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202095 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202100 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202106 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202111 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202116 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202122 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202127 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202132 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202137 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202143 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7782" event={"ID":"b3dfe801-a30e-4352-bec1-869e46ad5f0a","Type":"ContainerDied","Data":"d7ef61205d215896237bde40b95364cf02d3cb48d352942dbbfb38d36315cbb9"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202151 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202158 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202164 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202169 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202175 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202180 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202187 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202197 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202207 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.202213 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.232552 4747 scope.go:117] "RemoveContainer" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.249807 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.272055 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7782"] Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.273028 4747 scope.go:117] "RemoveContainer" 
containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.282897 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7782"] Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.294070 4747 scope.go:117] "RemoveContainer" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.310069 4747 scope.go:117] "RemoveContainer" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.330869 4747 scope.go:117] "RemoveContainer" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.347572 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3dfe801-a30e-4352-bec1-869e46ad5f0a" path="/var/lib/kubelet/pods/b3dfe801-a30e-4352-bec1-869e46ad5f0a/volumes" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.378832 4747 scope.go:117] "RemoveContainer" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.395762 4747 scope.go:117] "RemoveContainer" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.407956 4747 scope.go:117] "RemoveContainer" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.422973 4747 scope.go:117] "RemoveContainer" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.441448 4747 scope.go:117] "RemoveContainer" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.442110 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": container with ID starting with d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d not found: ID does not exist" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.442149 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} err="failed to get container status \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": rpc error: code = NotFound desc = could not find container \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": container with ID starting with d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.442204 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.442806 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": container with ID starting with fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266 not found: ID does not exist" 
containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.442896 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} err="failed to get container status \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": rpc error: code = NotFound desc = could not find container \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": container with ID starting with fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.442991 4747 scope.go:117] "RemoveContainer" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.443566 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": container with ID starting with 8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5 not found: ID does not exist" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.443624 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} err="failed to get container status \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": rpc error: code = NotFound desc = could not find container \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": container with ID starting with 8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.443655 4747 scope.go:117] "RemoveContainer" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.444001 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": container with ID starting with 18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774 not found: ID does not exist" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.444092 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} err="failed to get container status \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": rpc error: code = NotFound desc = could not find container \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": container with ID starting with 18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.444218 4747 scope.go:117] "RemoveContainer" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.445486 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": container with ID starting with d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347 not found: ID does not exist" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.445510 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} err="failed to get container status \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": rpc error: code = NotFound desc = could not find container \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": container with ID starting with d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.445525 4747 scope.go:117] "RemoveContainer" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.447092 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": container with ID starting with cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94 not found: ID does not exist" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.447143 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} err="failed to get container status \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": rpc error: code = NotFound desc = could not find container \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": container with ID starting with cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.447171 4747 scope.go:117] "RemoveContainer" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.447435 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": container with ID starting with df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e not found: ID does not exist" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.447538 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} err="failed to get container status \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": rpc error: code = NotFound desc = could not find container \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": container with ID starting with df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.447612 4747 scope.go:117] "RemoveContainer" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc 
kubenswrapper[4747]: E0202 09:05:32.448229 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": container with ID starting with 70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac not found: ID does not exist" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.448312 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} err="failed to get container status \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": rpc error: code = NotFound desc = could not find container \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": container with ID starting with 70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.448377 4747 scope.go:117] "RemoveContainer" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.448727 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": container with ID starting with 998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176 not found: ID does not exist" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.448755 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} err="failed to get container status \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": rpc error: code = NotFound desc = could not find container \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": container with ID starting with 998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.448773 4747 scope.go:117] "RemoveContainer" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: E0202 09:05:32.449157 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": container with ID starting with bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad not found: ID does not exist" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.449183 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} err="failed to get container status \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": rpc error: code = NotFound desc = could not find container \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": container with ID starting with bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: 
I0202 09:05:32.449229 4747 scope.go:117] "RemoveContainer" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.449660 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} err="failed to get container status \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": rpc error: code = NotFound desc = could not find container \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": container with ID starting with d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.449683 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.450011 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} err="failed to get container status \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": rpc error: code = NotFound desc = could not find container \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": container with ID starting with fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.450035 4747 scope.go:117] "RemoveContainer" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.450708 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} err="failed to get container status \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": rpc error: code = NotFound desc = could not find container \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": container with ID starting with 8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.450736 4747 scope.go:117] "RemoveContainer" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.451303 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} err="failed to get container status \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": rpc error: code = NotFound desc = could not find container \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": container with ID starting with 18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.451332 4747 scope.go:117] "RemoveContainer" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.451654 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} err="failed to get container status 
\"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": rpc error: code = NotFound desc = could not find container \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": container with ID starting with d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.451682 4747 scope.go:117] "RemoveContainer" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.452145 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} err="failed to get container status \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": rpc error: code = NotFound desc = could not find container \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": container with ID starting with cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.452187 4747 scope.go:117] "RemoveContainer" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.452537 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} err="failed to get container status \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": rpc error: code = NotFound desc = could not find container \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": container with ID starting with df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.452571 4747 scope.go:117] "RemoveContainer" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.453070 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} err="failed to get container status \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": rpc error: code = NotFound desc = could not find container \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": container with ID starting with 70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.453096 4747 scope.go:117] "RemoveContainer" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.453556 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} err="failed to get container status \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": rpc error: code = NotFound desc = could not find container \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": container with ID starting with 998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.453589 4747 scope.go:117] "RemoveContainer" 
containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.454094 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} err="failed to get container status \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": rpc error: code = NotFound desc = could not find container \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": container with ID starting with bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.454402 4747 scope.go:117] "RemoveContainer" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.458777 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} err="failed to get container status \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": rpc error: code = NotFound desc = could not find container \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": container with ID starting with d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.458814 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.459296 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} err="failed to get container status \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": rpc error: code = NotFound desc = could not find container \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": container with ID starting with fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.459325 4747 scope.go:117] "RemoveContainer" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.461302 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} err="failed to get container status \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": rpc error: code = NotFound desc = could not find container \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": container with ID starting with 8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.461365 4747 scope.go:117] "RemoveContainer" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.461775 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} err="failed to get container status \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": rpc error: code = NotFound desc = could not find 
container \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": container with ID starting with 18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.461811 4747 scope.go:117] "RemoveContainer" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.462331 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} err="failed to get container status \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": rpc error: code = NotFound desc = could not find container \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": container with ID starting with d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.462374 4747 scope.go:117] "RemoveContainer" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.462670 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} err="failed to get container status \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": rpc error: code = NotFound desc = could not find container \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": container with ID starting with cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.462707 4747 scope.go:117] "RemoveContainer" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463034 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} err="failed to get container status \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": rpc error: code = NotFound desc = could not find container \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": container with ID starting with df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463064 4747 scope.go:117] "RemoveContainer" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463305 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} err="failed to get container status \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": rpc error: code = NotFound desc = could not find container \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": container with ID starting with 70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463348 4747 scope.go:117] "RemoveContainer" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463616 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} err="failed to get container status \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": rpc error: code = NotFound desc = could not find container \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": container with ID starting with 998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463639 4747 scope.go:117] "RemoveContainer" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463856 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} err="failed to get container status \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": rpc error: code = NotFound desc = could not find container \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": container with ID starting with bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.463875 4747 scope.go:117] "RemoveContainer" containerID="d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464121 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d"} err="failed to get container status \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": rpc error: code = NotFound desc = could not find container \"d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d\": container with ID starting with d02ad74cb66c2e8d022e7a509a7de2e03cc69969a2d87d073d1b4c1c89fd968d not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464153 4747 scope.go:117] "RemoveContainer" containerID="fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464384 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266"} err="failed to get container status \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": rpc error: code = NotFound desc = could not find container \"fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266\": container with ID starting with fc113fed135c25680c126c0f873c7c1cadcb8c28b77e743dda90ef9eba390266 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464410 4747 scope.go:117] "RemoveContainer" containerID="8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464649 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5"} err="failed to get container status \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": rpc error: code = NotFound desc = could not find container \"8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5\": container with ID starting with 
8cfb5c6a94ba415502f6202e33ff46bc60115bae666bb1574f2a0df5cae801b5 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464683 4747 scope.go:117] "RemoveContainer" containerID="18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464913 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774"} err="failed to get container status \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": rpc error: code = NotFound desc = could not find container \"18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774\": container with ID starting with 18f29a99943d8dec88eea776f5756a9977a85453fece77726c0d1e0da1319774 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.464966 4747 scope.go:117] "RemoveContainer" containerID="d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.465264 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347"} err="failed to get container status \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": rpc error: code = NotFound desc = could not find container \"d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347\": container with ID starting with d24f10a3e1cfa335893dba00ff2dfad2281cef9c2d9f196fbfdcf2399cec1347 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.465288 4747 scope.go:117] "RemoveContainer" containerID="cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.465738 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94"} err="failed to get container status \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": rpc error: code = NotFound desc = could not find container \"cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94\": container with ID starting with cd542c9d77d211bb33819818ce846ffc62cd699cccab26036efc1baac87f2a94 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.465765 4747 scope.go:117] "RemoveContainer" containerID="df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466005 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e"} err="failed to get container status \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": rpc error: code = NotFound desc = could not find container \"df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e\": container with ID starting with df8febd6bf8cfb2f48dc8f18787ad42b59b41400f194d6a664505bc8f556679e not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466034 4747 scope.go:117] "RemoveContainer" containerID="70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466378 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac"} err="failed to get container status \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": rpc error: code = NotFound desc = could not find container \"70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac\": container with ID starting with 70d4e7f650718d434268addebcd4dd23b1438c8b27d9afcb0cc0d769103d70ac not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466407 4747 scope.go:117] "RemoveContainer" containerID="998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466617 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176"} err="failed to get container status \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": rpc error: code = NotFound desc = could not find container \"998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176\": container with ID starting with 998780f669fbcdbea153a71af5e8bd4a1e81d366dffbf8bde71ab1fc54bb7176 not found: ID does not exist" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466643 4747 scope.go:117] "RemoveContainer" containerID="bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad" Feb 02 09:05:32 crc kubenswrapper[4747]: I0202 09:05:32.466969 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad"} err="failed to get container status \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": rpc error: code = NotFound desc = could not find container \"bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad\": container with ID starting with bb105c61b24856008b9089fc6d73a89e5a44fccfe174c8be85c7b25fc45812ad not found: ID does not exist" Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218698 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"47f3f079a43eed55294739183e9a5cf44403d3b596bbc2ec675de5d5fbc61bbe"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218742 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"3f6d96b75a2cda37eb629fda118e2b9e762e39dace67f5398d3ad032dd5c9156"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218753 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"f9b66652e95d0cd201acab81e4c1b0c42964f0cb18f358d3680f07e1328d99bb"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218764 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"4091e96ef2d219a00ef1f47143282d342c28f6bf67b2b8e738107574ef2da386"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218774 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" 
event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"26bf5630054940def1c014f37af844197c4f2bb8b76ff85b0796050566130172"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.218783 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"68eb8168b0afa8991576d53f402065df8280ee02fdc937a078cc1a25b8719add"} Feb 02 09:05:33 crc kubenswrapper[4747]: I0202 09:05:33.220548 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/2.log" Feb 02 09:05:35 crc kubenswrapper[4747]: I0202 09:05:35.235445 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"b329c0a5b3548724288c313a71127fb82b7b862600aa76c8c69ecb3de8aafdd9"} Feb 02 09:05:38 crc kubenswrapper[4747]: I0202 09:05:38.256332 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" event={"ID":"6e309b88-c22c-434e-a7a4-86af050e55f8","Type":"ContainerStarted","Data":"4c532c350621d5eee3bbee7ee3eb3a5753fe3afc2527f9271d34dbf3cfc0d562"} Feb 02 09:05:38 crc kubenswrapper[4747]: I0202 09:05:38.256923 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:38 crc kubenswrapper[4747]: I0202 09:05:38.256950 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:38 crc kubenswrapper[4747]: I0202 09:05:38.295269 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:38 crc kubenswrapper[4747]: I0202 09:05:38.302341 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" podStartSLOduration=7.302326398 podStartE2EDuration="7.302326398s" podCreationTimestamp="2026-02-02 09:05:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:05:38.296974475 +0000 UTC m=+550.841312928" watchObservedRunningTime="2026-02-02 09:05:38.302326398 +0000 UTC m=+550.846664831" Feb 02 09:05:39 crc kubenswrapper[4747]: I0202 09:05:39.261543 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:39 crc kubenswrapper[4747]: I0202 09:05:39.332187 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:05:44 crc kubenswrapper[4747]: I0202 09:05:44.353257 4747 scope.go:117] "RemoveContainer" containerID="2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d" Feb 02 09:05:44 crc kubenswrapper[4747]: E0202 09:05:44.353897 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pvnm7_openshift-multus(b1945e48-8aba-4a55-8dce-18e4a87ce4c5)\"" pod="openshift-multus/multus-pvnm7" podUID="b1945e48-8aba-4a55-8dce-18e4a87ce4c5" Feb 02 09:05:45 crc kubenswrapper[4747]: I0202 09:05:45.519894 4747 scope.go:117] "RemoveContainer" 
containerID="56e6a5bc39fc32e9958b5ffa7f60bc829c13b6f09dc244260a8ed9d10ec3b0a3" Feb 02 09:05:50 crc kubenswrapper[4747]: I0202 09:05:50.519253 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:05:50 crc kubenswrapper[4747]: I0202 09:05:50.519532 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:05:59 crc kubenswrapper[4747]: I0202 09:05:59.339292 4747 scope.go:117] "RemoveContainer" containerID="2cff78528477bd23523c39147d9b5f1e1c358354af733c79baed7b8ed2791b9d" Feb 02 09:06:00 crc kubenswrapper[4747]: I0202 09:06:00.408441 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvnm7_b1945e48-8aba-4a55-8dce-18e4a87ce4c5/kube-multus/2.log" Feb 02 09:06:00 crc kubenswrapper[4747]: I0202 09:06:00.408962 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvnm7" event={"ID":"b1945e48-8aba-4a55-8dce-18e4a87ce4c5","Type":"ContainerStarted","Data":"3192d6a3c76ee24c681b12e090b75c07e7c2dcb6d71fb69705db92535d3a72b3"} Feb 02 09:06:01 crc kubenswrapper[4747]: I0202 09:06:01.953069 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-n2xc9" Feb 02 09:06:08 crc kubenswrapper[4747]: I0202 09:06:08.958682 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh"] Feb 02 09:06:08 crc kubenswrapper[4747]: I0202 09:06:08.960230 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:08 crc kubenswrapper[4747]: I0202 09:06:08.963536 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 09:06:08 crc kubenswrapper[4747]: I0202 09:06:08.969136 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh"] Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.081341 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.081422 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.081686 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjfmg\" (UniqueName: \"kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.183234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.183334 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.183501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjfmg\" (UniqueName: \"kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.183754 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.183919 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.207993 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjfmg\" (UniqueName: \"kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.337334 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:09 crc kubenswrapper[4747]: I0202 09:06:09.774702 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh"] Feb 02 09:06:09 crc kubenswrapper[4747]: W0202 09:06:09.780772 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cdc4e0f_2a37_4329_82df_e8a06f3d50db.slice/crio-9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c WatchSource:0}: Error finding container 9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c: Status 404 returned error can't find the container with id 9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c Feb 02 09:06:10 crc kubenswrapper[4747]: I0202 09:06:10.485598 4747 generic.go:334] "Generic (PLEG): container finished" podID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerID="2a7e6c25a890f939940484b93d7d7ebaa30ec1b9f13048ff5854905f2cda53eb" exitCode=0 Feb 02 09:06:10 crc kubenswrapper[4747]: I0202 09:06:10.485643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" event={"ID":"7cdc4e0f-2a37-4329-82df-e8a06f3d50db","Type":"ContainerDied","Data":"2a7e6c25a890f939940484b93d7d7ebaa30ec1b9f13048ff5854905f2cda53eb"} Feb 02 09:06:10 crc kubenswrapper[4747]: I0202 09:06:10.485673 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" event={"ID":"7cdc4e0f-2a37-4329-82df-e8a06f3d50db","Type":"ContainerStarted","Data":"9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c"} Feb 02 09:06:12 crc kubenswrapper[4747]: I0202 09:06:12.500703 4747 generic.go:334] "Generic (PLEG): container finished" podID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerID="b231061dcce07be9f0a81f2c179392429f7573146bea5152b1de786aef8942e8" exitCode=0 Feb 02 09:06:12 crc kubenswrapper[4747]: I0202 09:06:12.500753 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" event={"ID":"7cdc4e0f-2a37-4329-82df-e8a06f3d50db","Type":"ContainerDied","Data":"b231061dcce07be9f0a81f2c179392429f7573146bea5152b1de786aef8942e8"} Feb 02 09:06:13 crc kubenswrapper[4747]: I0202 09:06:13.510553 4747 generic.go:334] "Generic (PLEG): container finished" podID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerID="8e5e48ef59cad861fa052120b65f533223b3486f38f1a3bbb1603d7e3c3479d3" exitCode=0 Feb 02 09:06:13 crc kubenswrapper[4747]: I0202 09:06:13.510611 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" event={"ID":"7cdc4e0f-2a37-4329-82df-e8a06f3d50db","Type":"ContainerDied","Data":"8e5e48ef59cad861fa052120b65f533223b3486f38f1a3bbb1603d7e3c3479d3"} Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.766875 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.870920 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util\") pod \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.871047 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjfmg\" (UniqueName: \"kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg\") pod \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.871092 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle\") pod \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\" (UID: \"7cdc4e0f-2a37-4329-82df-e8a06f3d50db\") " Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.871606 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle" (OuterVolumeSpecName: "bundle") pod "7cdc4e0f-2a37-4329-82df-e8a06f3d50db" (UID: "7cdc4e0f-2a37-4329-82df-e8a06f3d50db"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.879375 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg" (OuterVolumeSpecName: "kube-api-access-cjfmg") pod "7cdc4e0f-2a37-4329-82df-e8a06f3d50db" (UID: "7cdc4e0f-2a37-4329-82df-e8a06f3d50db"). InnerVolumeSpecName "kube-api-access-cjfmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.884170 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util" (OuterVolumeSpecName: "util") pod "7cdc4e0f-2a37-4329-82df-e8a06f3d50db" (UID: "7cdc4e0f-2a37-4329-82df-e8a06f3d50db"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.972551 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-util\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.972594 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjfmg\" (UniqueName: \"kubernetes.io/projected/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-kube-api-access-cjfmg\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:14 crc kubenswrapper[4747]: I0202 09:06:14.972608 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7cdc4e0f-2a37-4329-82df-e8a06f3d50db-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:15 crc kubenswrapper[4747]: I0202 09:06:15.521770 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" event={"ID":"7cdc4e0f-2a37-4329-82df-e8a06f3d50db","Type":"ContainerDied","Data":"9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c"} Feb 02 09:06:15 crc kubenswrapper[4747]: I0202 09:06:15.521806 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9463ca44570d911aa6363e35388bacc53dac3810679fd7cbe819b36d300eca5c" Feb 02 09:06:15 crc kubenswrapper[4747]: I0202 09:06:15.521834 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.608211 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-l87fs"] Feb 02 09:06:16 crc kubenswrapper[4747]: E0202 09:06:16.608680 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="pull" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.608692 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="pull" Feb 02 09:06:16 crc kubenswrapper[4747]: E0202 09:06:16.608703 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="util" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.608709 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="util" Feb 02 09:06:16 crc kubenswrapper[4747]: E0202 09:06:16.608719 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="extract" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.608725 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="extract" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.608816 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cdc4e0f-2a37-4329-82df-e8a06f3d50db" containerName="extract" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.609154 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.610929 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.611185 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.611461 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qpcqj" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.623306 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-l87fs"] Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.690787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tjxp\" (UniqueName: \"kubernetes.io/projected/2d26069a-13b2-4f8a-ba4b-a25bfd428db3-kube-api-access-8tjxp\") pod \"nmstate-operator-646758c888-l87fs\" (UID: \"2d26069a-13b2-4f8a-ba4b-a25bfd428db3\") " pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.791519 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tjxp\" (UniqueName: \"kubernetes.io/projected/2d26069a-13b2-4f8a-ba4b-a25bfd428db3-kube-api-access-8tjxp\") pod \"nmstate-operator-646758c888-l87fs\" (UID: \"2d26069a-13b2-4f8a-ba4b-a25bfd428db3\") " pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.809843 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tjxp\" (UniqueName: \"kubernetes.io/projected/2d26069a-13b2-4f8a-ba4b-a25bfd428db3-kube-api-access-8tjxp\") pod \"nmstate-operator-646758c888-l87fs\" (UID: \"2d26069a-13b2-4f8a-ba4b-a25bfd428db3\") " pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" Feb 02 09:06:16 crc kubenswrapper[4747]: I0202 09:06:16.921980 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" Feb 02 09:06:17 crc kubenswrapper[4747]: I0202 09:06:17.165810 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-l87fs"] Feb 02 09:06:17 crc kubenswrapper[4747]: W0202 09:06:17.171108 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d26069a_13b2_4f8a_ba4b_a25bfd428db3.slice/crio-2d4d1d7972f1be470ed42359be7a6068a8481759f35f563467f33c5951b35a25 WatchSource:0}: Error finding container 2d4d1d7972f1be470ed42359be7a6068a8481759f35f563467f33c5951b35a25: Status 404 returned error can't find the container with id 2d4d1d7972f1be470ed42359be7a6068a8481759f35f563467f33c5951b35a25 Feb 02 09:06:17 crc kubenswrapper[4747]: I0202 09:06:17.533859 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" event={"ID":"2d26069a-13b2-4f8a-ba4b-a25bfd428db3","Type":"ContainerStarted","Data":"2d4d1d7972f1be470ed42359be7a6068a8481759f35f563467f33c5951b35a25"} Feb 02 09:06:19 crc kubenswrapper[4747]: I0202 09:06:19.546652 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" event={"ID":"2d26069a-13b2-4f8a-ba4b-a25bfd428db3","Type":"ContainerStarted","Data":"5952c7b7d9175a8ec8a712c1dfeae56d823fa840b5f02fb1cf2b6dd26b2433e9"} Feb 02 09:06:19 crc kubenswrapper[4747]: I0202 09:06:19.563100 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-l87fs" podStartSLOduration=1.360816617 podStartE2EDuration="3.563081836s" podCreationTimestamp="2026-02-02 09:06:16 +0000 UTC" firstStartedPulling="2026-02-02 09:06:17.172975766 +0000 UTC m=+589.717314199" lastFinishedPulling="2026-02-02 09:06:19.375240975 +0000 UTC m=+591.919579418" observedRunningTime="2026-02-02 09:06:19.561479307 +0000 UTC m=+592.105817760" watchObservedRunningTime="2026-02-02 09:06:19.563081836 +0000 UTC m=+592.107420269" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.401446 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-8zx7l"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.402537 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.405146 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-bd5g2" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.407436 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-8zx7l"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.413547 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.414396 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.416073 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.427313 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.464232 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-dmcw9"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.465050 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.518314 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.518425 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.544105 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e354bf65-f7c8-4fab-a288-ff0ffb879c62-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.544214 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhkcg\" (UniqueName: \"kubernetes.io/projected/622130de-c2ad-4b1f-90bc-78f0173d2fe2-kube-api-access-xhkcg\") pod \"nmstate-metrics-54757c584b-8zx7l\" (UID: \"622130de-c2ad-4b1f-90bc-78f0173d2fe2\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.544261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4gh\" (UniqueName: \"kubernetes.io/projected/e354bf65-f7c8-4fab-a288-ff0ffb879c62-kube-api-access-sd4gh\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.553555 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.554352 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.556121 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.556384 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-cnkxg" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.556831 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.567759 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e354bf65-f7c8-4fab-a288-ff0ffb879c62-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645090 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhkcg\" (UniqueName: \"kubernetes.io/projected/622130de-c2ad-4b1f-90bc-78f0173d2fe2-kube-api-access-xhkcg\") pod \"nmstate-metrics-54757c584b-8zx7l\" (UID: \"622130de-c2ad-4b1f-90bc-78f0173d2fe2\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-nmstate-lock\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4gh\" (UniqueName: \"kubernetes.io/projected/e354bf65-f7c8-4fab-a288-ff0ffb879c62-kube-api-access-sd4gh\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645196 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jw29\" (UniqueName: \"kubernetes.io/projected/273a950f-b0c3-4f52-be28-cae9de106aaf-kube-api-access-4jw29\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-dbus-socket\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.645251 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-ovs-socket\") pod \"nmstate-handler-dmcw9\" (UID: 
\"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.652845 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e354bf65-f7c8-4fab-a288-ff0ffb879c62-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.665058 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhkcg\" (UniqueName: \"kubernetes.io/projected/622130de-c2ad-4b1f-90bc-78f0173d2fe2-kube-api-access-xhkcg\") pod \"nmstate-metrics-54757c584b-8zx7l\" (UID: \"622130de-c2ad-4b1f-90bc-78f0173d2fe2\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.665888 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4gh\" (UniqueName: \"kubernetes.io/projected/e354bf65-f7c8-4fab-a288-ff0ffb879c62-kube-api-access-sd4gh\") pod \"nmstate-webhook-8474b5b9d8-8t7kj\" (UID: \"e354bf65-f7c8-4fab-a288-ff0ffb879c62\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.727768 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.738100 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746427 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-nmstate-lock\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746492 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jw29\" (UniqueName: \"kubernetes.io/projected/273a950f-b0c3-4f52-be28-cae9de106aaf-kube-api-access-4jw29\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-dbus-socket\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746544 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-ovs-socket\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8efb24a8-e59e-47da-a5eb-253fd3b215c1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: 
\"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746625 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc88l\" (UniqueName: \"kubernetes.io/projected/8efb24a8-e59e-47da-a5eb-253fd3b215c1-kube-api-access-bc88l\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746651 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8efb24a8-e59e-47da-a5eb-253fd3b215c1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.746735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-nmstate-lock\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.747004 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-ovs-socket\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.747738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/273a950f-b0c3-4f52-be28-cae9de106aaf-dbus-socket\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.762989 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7b5885c8df-p96k4"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.780665 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jw29\" (UniqueName: \"kubernetes.io/projected/273a950f-b0c3-4f52-be28-cae9de106aaf-kube-api-access-4jw29\") pod \"nmstate-handler-dmcw9\" (UID: \"273a950f-b0c3-4f52-be28-cae9de106aaf\") " pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.781253 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.793460 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.805381 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b5885c8df-p96k4"] Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.848043 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8efb24a8-e59e-47da-a5eb-253fd3b215c1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.848296 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc88l\" (UniqueName: \"kubernetes.io/projected/8efb24a8-e59e-47da-a5eb-253fd3b215c1-kube-api-access-bc88l\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.848320 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8efb24a8-e59e-47da-a5eb-253fd3b215c1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.849321 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8efb24a8-e59e-47da-a5eb-253fd3b215c1-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.853056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8efb24a8-e59e-47da-a5eb-253fd3b215c1-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.867663 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc88l\" (UniqueName: \"kubernetes.io/projected/8efb24a8-e59e-47da-a5eb-253fd3b215c1-kube-api-access-bc88l\") pod \"nmstate-console-plugin-7754f76f8b-5zpv9\" (UID: \"8efb24a8-e59e-47da-a5eb-253fd3b215c1\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.867970 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950489 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950536 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950593 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-service-ca\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh92c\" (UniqueName: \"kubernetes.io/projected/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-kube-api-access-kh92c\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950654 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-trusted-ca-bundle\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-oauth-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.950726 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-oauth-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:20 crc kubenswrapper[4747]: I0202 09:06:20.997317 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-8zx7l"] Feb 02 09:06:21 crc kubenswrapper[4747]: W0202 09:06:21.005002 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod622130de_c2ad_4b1f_90bc_78f0173d2fe2.slice/crio-ee8cfd81d2d5600c3724b1b85c2738733e33998ef9e3a061ad87ca27039345f9 WatchSource:0}: Error finding container 
ee8cfd81d2d5600c3724b1b85c2738733e33998ef9e3a061ad87ca27039345f9: Status 404 returned error can't find the container with id ee8cfd81d2d5600c3724b1b85c2738733e33998ef9e3a061ad87ca27039345f9 Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.051761 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-oauth-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.051847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-oauth-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.051882 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.051911 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.051981 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-service-ca\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.052010 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh92c\" (UniqueName: \"kubernetes.io/projected/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-kube-api-access-kh92c\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.052047 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-trusted-ca-bundle\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.053394 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.053408 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-oauth-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.053536 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-service-ca\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.054222 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-trusted-ca-bundle\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.055467 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-serving-cert\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.057087 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-console-oauth-config\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.073784 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh92c\" (UniqueName: \"kubernetes.io/projected/eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0-kube-api-access-kh92c\") pod \"console-7b5885c8df-p96k4\" (UID: \"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0\") " pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.115034 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9"] Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.123335 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.212573 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj"] Feb 02 09:06:21 crc kubenswrapper[4747]: W0202 09:06:21.219673 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode354bf65_f7c8_4fab_a288_ff0ffb879c62.slice/crio-f3e1185364cecf0f1987abebc7834021fce102b3e356423a5ecb507f25b27481 WatchSource:0}: Error finding container f3e1185364cecf0f1987abebc7834021fce102b3e356423a5ecb507f25b27481: Status 404 returned error can't find the container with id f3e1185364cecf0f1987abebc7834021fce102b3e356423a5ecb507f25b27481 Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.352386 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b5885c8df-p96k4"] Feb 02 09:06:21 crc kubenswrapper[4747]: W0202 09:06:21.355463 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb6257c7_1d42_4c7f_96ad_ab2af5fb7cd0.slice/crio-f8a4ce6c664da3e91093f9e7f458e01ad09d36138005e1a72f41802062ef57e3 WatchSource:0}: Error finding container f8a4ce6c664da3e91093f9e7f458e01ad09d36138005e1a72f41802062ef57e3: Status 404 returned error can't find the container with id f8a4ce6c664da3e91093f9e7f458e01ad09d36138005e1a72f41802062ef57e3 Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.558419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" event={"ID":"622130de-c2ad-4b1f-90bc-78f0173d2fe2","Type":"ContainerStarted","Data":"ee8cfd81d2d5600c3724b1b85c2738733e33998ef9e3a061ad87ca27039345f9"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.559565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" event={"ID":"8efb24a8-e59e-47da-a5eb-253fd3b215c1","Type":"ContainerStarted","Data":"5844965ed79e465a0b84ddd8974125159c6cf4ee549be3a36b7cf9efcb48ebbc"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.561117 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b5885c8df-p96k4" event={"ID":"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0","Type":"ContainerStarted","Data":"7106349d7dd813685e7f51b2101b1a807244e4ad42c3886b23e5d55ec5bef196"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.561140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b5885c8df-p96k4" event={"ID":"eb6257c7-1d42-4c7f-96ad-ab2af5fb7cd0","Type":"ContainerStarted","Data":"f8a4ce6c664da3e91093f9e7f458e01ad09d36138005e1a72f41802062ef57e3"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.562037 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dmcw9" event={"ID":"273a950f-b0c3-4f52-be28-cae9de106aaf","Type":"ContainerStarted","Data":"f65fe75d6d9ece9d0e78ebf3190e351ccbaa74fc4192a7aa704fbfe48c4af5ce"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.563380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" event={"ID":"e354bf65-f7c8-4fab-a288-ff0ffb879c62","Type":"ContainerStarted","Data":"f3e1185364cecf0f1987abebc7834021fce102b3e356423a5ecb507f25b27481"} Feb 02 09:06:21 crc kubenswrapper[4747]: I0202 09:06:21.587369 4747 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-console/console-7b5885c8df-p96k4" podStartSLOduration=1.587339097 podStartE2EDuration="1.587339097s" podCreationTimestamp="2026-02-02 09:06:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:06:21.584304062 +0000 UTC m=+594.128642505" watchObservedRunningTime="2026-02-02 09:06:21.587339097 +0000 UTC m=+594.131677530" Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.591148 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" event={"ID":"e354bf65-f7c8-4fab-a288-ff0ffb879c62","Type":"ContainerStarted","Data":"2af790f9401614800f44dfbe2f7a0d6a34e8005b2848ba9de9173da930309bf0"} Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.591902 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.593663 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" event={"ID":"622130de-c2ad-4b1f-90bc-78f0173d2fe2","Type":"ContainerStarted","Data":"fed2399a9a1852ec4413387ac9d8240664ee137dc7097538fe65df77ac9254e9"} Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.595328 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" event={"ID":"8efb24a8-e59e-47da-a5eb-253fd3b215c1","Type":"ContainerStarted","Data":"244f20f4c9fcbe94619626314ba3f38eb5ae96ae69b58fc9c47671ada8ad5e6b"} Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.596375 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-dmcw9" event={"ID":"273a950f-b0c3-4f52-be28-cae9de106aaf","Type":"ContainerStarted","Data":"079e05d81c8c0a37616bdba60ebd4cb79f0d0cca9175df2b59f4731aee58c44e"} Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.596925 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.611021 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" podStartSLOduration=2.393421483 podStartE2EDuration="4.611002749s" podCreationTimestamp="2026-02-02 09:06:20 +0000 UTC" firstStartedPulling="2026-02-02 09:06:21.222162865 +0000 UTC m=+593.766501298" lastFinishedPulling="2026-02-02 09:06:23.439744131 +0000 UTC m=+595.984082564" observedRunningTime="2026-02-02 09:06:24.60699467 +0000 UTC m=+597.151333113" watchObservedRunningTime="2026-02-02 09:06:24.611002749 +0000 UTC m=+597.155341182" Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.620571 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-5zpv9" podStartSLOduration=2.33708624 podStartE2EDuration="4.620551185s" podCreationTimestamp="2026-02-02 09:06:20 +0000 UTC" firstStartedPulling="2026-02-02 09:06:21.14912225 +0000 UTC m=+593.693460683" lastFinishedPulling="2026-02-02 09:06:23.432587195 +0000 UTC m=+595.976925628" observedRunningTime="2026-02-02 09:06:24.619616721 +0000 UTC m=+597.163955164" watchObservedRunningTime="2026-02-02 09:06:24.620551185 +0000 UTC m=+597.164889618" Feb 02 09:06:24 crc kubenswrapper[4747]: I0202 09:06:24.633975 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-handler-dmcw9" podStartSLOduration=2.022836146 podStartE2EDuration="4.633956046s" podCreationTimestamp="2026-02-02 09:06:20 +0000 UTC" firstStartedPulling="2026-02-02 09:06:20.821872975 +0000 UTC m=+593.366211398" lastFinishedPulling="2026-02-02 09:06:23.432992845 +0000 UTC m=+595.977331298" observedRunningTime="2026-02-02 09:06:24.632813657 +0000 UTC m=+597.177152090" watchObservedRunningTime="2026-02-02 09:06:24.633956046 +0000 UTC m=+597.178294479" Feb 02 09:06:25 crc kubenswrapper[4747]: I0202 09:06:25.605489 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" event={"ID":"622130de-c2ad-4b1f-90bc-78f0173d2fe2","Type":"ContainerStarted","Data":"de25b5a725a2a8be55f6160b667724c462c533a9e90a4d55884224405f65c0b7"} Feb 02 09:06:25 crc kubenswrapper[4747]: I0202 09:06:25.626835 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-8zx7l" podStartSLOduration=1.198630924 podStartE2EDuration="5.626818427s" podCreationTimestamp="2026-02-02 09:06:20 +0000 UTC" firstStartedPulling="2026-02-02 09:06:21.007176563 +0000 UTC m=+593.551515006" lastFinishedPulling="2026-02-02 09:06:25.435364076 +0000 UTC m=+597.979702509" observedRunningTime="2026-02-02 09:06:25.624878989 +0000 UTC m=+598.169217422" watchObservedRunningTime="2026-02-02 09:06:25.626818427 +0000 UTC m=+598.171156860" Feb 02 09:06:30 crc kubenswrapper[4747]: I0202 09:06:30.808254 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-dmcw9" Feb 02 09:06:31 crc kubenswrapper[4747]: I0202 09:06:31.124372 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:31 crc kubenswrapper[4747]: I0202 09:06:31.124508 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:31 crc kubenswrapper[4747]: I0202 09:06:31.128800 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:31 crc kubenswrapper[4747]: I0202 09:06:31.651435 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7b5885c8df-p96k4" Feb 02 09:06:31 crc kubenswrapper[4747]: I0202 09:06:31.701627 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 09:06:40 crc kubenswrapper[4747]: I0202 09:06:40.743373 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-8t7kj" Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.519317 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.519931 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.520014 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.520675 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.520744 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b" gracePeriod=600 Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.756758 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b" exitCode=0 Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.756792 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b"} Feb 02 09:06:50 crc kubenswrapper[4747]: I0202 09:06:50.757333 4747 scope.go:117] "RemoveContainer" containerID="7cee0f9aad4ab5e00f2a9b56f3dbf6ac0c95c1aa267e52c4fe110c7bcef4e605" Feb 02 09:06:51 crc kubenswrapper[4747]: I0202 09:06:51.769618 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b"} Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.794558 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8"] Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.796534 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.800958 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.806794 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8"] Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.837668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.837793 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.837858 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztpsl\" (UniqueName: \"kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.939835 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.939990 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.940071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztpsl\" (UniqueName: \"kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.940512 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.940601 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:52 crc kubenswrapper[4747]: I0202 09:06:52.972595 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztpsl\" (UniqueName: \"kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:53 crc kubenswrapper[4747]: I0202 09:06:53.116544 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:53 crc kubenswrapper[4747]: I0202 09:06:53.302156 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8"] Feb 02 09:06:53 crc kubenswrapper[4747]: W0202 09:06:53.308097 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf74f35ee_5609_4669_a870_a67f99347446.slice/crio-c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d WatchSource:0}: Error finding container c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d: Status 404 returned error can't find the container with id c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d Feb 02 09:06:53 crc kubenswrapper[4747]: I0202 09:06:53.781986 4747 generic.go:334] "Generic (PLEG): container finished" podID="f74f35ee-5609-4669-a870-a67f99347446" containerID="1e56227cafb5c3df737cf610799bafab569f92fa5ad8f4c377930efb5361e6d4" exitCode=0 Feb 02 09:06:53 crc kubenswrapper[4747]: I0202 09:06:53.782047 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" event={"ID":"f74f35ee-5609-4669-a870-a67f99347446","Type":"ContainerDied","Data":"1e56227cafb5c3df737cf610799bafab569f92fa5ad8f4c377930efb5361e6d4"} Feb 02 09:06:53 crc kubenswrapper[4747]: I0202 09:06:53.782103 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" event={"ID":"f74f35ee-5609-4669-a870-a67f99347446","Type":"ContainerStarted","Data":"c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d"} Feb 02 09:06:55 crc kubenswrapper[4747]: I0202 09:06:55.801390 4747 generic.go:334] "Generic (PLEG): container finished" podID="f74f35ee-5609-4669-a870-a67f99347446" containerID="8ab71621f6b542913cb525dc2a7f8f9f1b004af7fe6f4ddb0d2fc53e9bf7d36c" exitCode=0 Feb 02 09:06:55 crc kubenswrapper[4747]: I0202 09:06:55.801494 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" event={"ID":"f74f35ee-5609-4669-a870-a67f99347446","Type":"ContainerDied","Data":"8ab71621f6b542913cb525dc2a7f8f9f1b004af7fe6f4ddb0d2fc53e9bf7d36c"} Feb 02 09:06:56 crc kubenswrapper[4747]: I0202 09:06:56.741210 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-s6zqr" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerName="console" containerID="cri-o://d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51" gracePeriod=15 Feb 02 09:06:56 crc kubenswrapper[4747]: I0202 09:06:56.809485 4747 generic.go:334] "Generic (PLEG): container finished" podID="f74f35ee-5609-4669-a870-a67f99347446" containerID="25a1d170bd5faa940e93ae74bbb0814eada5f71f837ad1a700b1eced7c05099c" exitCode=0 Feb 02 09:06:56 crc kubenswrapper[4747]: I0202 09:06:56.809519 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" event={"ID":"f74f35ee-5609-4669-a870-a67f99347446","Type":"ContainerDied","Data":"25a1d170bd5faa940e93ae74bbb0814eada5f71f837ad1a700b1eced7c05099c"} Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.092842 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-s6zqr_51fdceee-2c8b-4830-a3c9-8f02a0e36cc3/console/0.log" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.092914 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194279 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194425 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194486 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194524 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5zj9\" (UniqueName: \"kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194557 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194583 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.194611 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca\") pod \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\" (UID: \"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3\") " Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.195227 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.195568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.195881 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config" (OuterVolumeSpecName: "console-config") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.195981 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca" (OuterVolumeSpecName: "service-ca") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.201211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.201618 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9" (OuterVolumeSpecName: "kube-api-access-j5zj9") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "kube-api-access-j5zj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.201637 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" (UID: "51fdceee-2c8b-4830-a3c9-8f02a0e36cc3"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295641 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295685 4747 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295695 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5zj9\" (UniqueName: \"kubernetes.io/projected/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-kube-api-access-j5zj9\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295705 4747 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295714 4747 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295722 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.295729 4747 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820355 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-s6zqr_51fdceee-2c8b-4830-a3c9-8f02a0e36cc3/console/0.log" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820403 4747 generic.go:334] "Generic (PLEG): container finished" podID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerID="d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51" exitCode=2 Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820463 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-s6zqr" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820464 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-s6zqr" event={"ID":"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3","Type":"ContainerDied","Data":"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51"} Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820504 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-s6zqr" event={"ID":"51fdceee-2c8b-4830-a3c9-8f02a0e36cc3","Type":"ContainerDied","Data":"f23128c7d79832ccfb536da9288284c5ab9d903b9989c1499c6f5d11795b6084"} Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.820530 4747 scope.go:117] "RemoveContainer" containerID="d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.836186 4747 scope.go:117] "RemoveContainer" containerID="d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51" Feb 02 09:06:57 crc kubenswrapper[4747]: E0202 09:06:57.836612 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51\": container with ID starting with d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51 not found: ID does not exist" containerID="d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.836643 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51"} err="failed to get container status \"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51\": rpc error: code = NotFound desc = could not find container \"d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51\": container with ID starting with d3434a09835d26e651c1eb8b001ca4df4f0faeba34c26036699a06db435a0c51 not found: ID does not exist" Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.850085 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 09:06:57 crc kubenswrapper[4747]: I0202 09:06:57.853750 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-s6zqr"] Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.022140 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.105410 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util\") pod \"f74f35ee-5609-4669-a870-a67f99347446\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.105487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztpsl\" (UniqueName: \"kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl\") pod \"f74f35ee-5609-4669-a870-a67f99347446\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.105595 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle\") pod \"f74f35ee-5609-4669-a870-a67f99347446\" (UID: \"f74f35ee-5609-4669-a870-a67f99347446\") " Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.106707 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle" (OuterVolumeSpecName: "bundle") pod "f74f35ee-5609-4669-a870-a67f99347446" (UID: "f74f35ee-5609-4669-a870-a67f99347446"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.111645 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl" (OuterVolumeSpecName: "kube-api-access-ztpsl") pod "f74f35ee-5609-4669-a870-a67f99347446" (UID: "f74f35ee-5609-4669-a870-a67f99347446"). InnerVolumeSpecName "kube-api-access-ztpsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.120877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util" (OuterVolumeSpecName: "util") pod "f74f35ee-5609-4669-a870-a67f99347446" (UID: "f74f35ee-5609-4669-a870-a67f99347446"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.207485 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.207540 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f74f35ee-5609-4669-a870-a67f99347446-util\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.207567 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztpsl\" (UniqueName: \"kubernetes.io/projected/f74f35ee-5609-4669-a870-a67f99347446-kube-api-access-ztpsl\") on node \"crc\" DevicePath \"\"" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.353807 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" path="/var/lib/kubelet/pods/51fdceee-2c8b-4830-a3c9-8f02a0e36cc3/volumes" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.830753 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" event={"ID":"f74f35ee-5609-4669-a870-a67f99347446","Type":"ContainerDied","Data":"c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d"} Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.830811 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3dbfd2c012ddc499e78c12650de893fa4c015c64b1ee2a080ee125e4d9c827d" Feb 02 09:06:58 crc kubenswrapper[4747]: I0202 09:06:58.830825 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.184915 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj"] Feb 02 09:07:07 crc kubenswrapper[4747]: E0202 09:07:07.185633 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="util" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185644 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="util" Feb 02 09:07:07 crc kubenswrapper[4747]: E0202 09:07:07.185652 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="pull" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185657 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="pull" Feb 02 09:07:07 crc kubenswrapper[4747]: E0202 09:07:07.185670 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="extract" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185676 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="extract" Feb 02 09:07:07 crc kubenswrapper[4747]: E0202 09:07:07.185690 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerName="console" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185696 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerName="console" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185791 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="51fdceee-2c8b-4830-a3c9-8f02a0e36cc3" containerName="console" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.185799 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f74f35ee-5609-4669-a870-a67f99347446" containerName="extract" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.186162 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.188180 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.188702 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-67v8p" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.189169 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.192911 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.193485 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.204008 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj"] Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.219617 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hsg\" (UniqueName: \"kubernetes.io/projected/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-kube-api-access-n7hsg\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.219659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-webhook-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.219679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-apiservice-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.320565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hsg\" (UniqueName: \"kubernetes.io/projected/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-kube-api-access-n7hsg\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.320624 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-webhook-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.320647 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-apiservice-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.334063 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-webhook-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.334147 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-apiservice-cert\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.338329 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hsg\" (UniqueName: \"kubernetes.io/projected/cf0b4df2-65a8-4cfa-a77f-a52634ce2b49-kube-api-access-n7hsg\") pod \"metallb-operator-controller-manager-6bf46c8785-7g5mj\" (UID: \"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49\") " pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.450555 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn"] Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.451375 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.453198 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.455097 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-4s95t" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.456623 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.460422 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn"] Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.504874 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.623951 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-apiservice-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.624335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-webhook-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.624367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6t5l\" (UniqueName: \"kubernetes.io/projected/ba335bdb-560d-4051-8f28-89dab7f4f9cb-kube-api-access-h6t5l\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.725489 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-apiservice-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.725552 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6t5l\" (UniqueName: \"kubernetes.io/projected/ba335bdb-560d-4051-8f28-89dab7f4f9cb-kube-api-access-h6t5l\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.725578 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-webhook-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.736959 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-webhook-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.752434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6t5l\" (UniqueName: \"kubernetes.io/projected/ba335bdb-560d-4051-8f28-89dab7f4f9cb-kube-api-access-h6t5l\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: 
\"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.753601 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj"] Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.763308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba335bdb-560d-4051-8f28-89dab7f4f9cb-apiservice-cert\") pod \"metallb-operator-webhook-server-6d99b6746d-8rlqn\" (UID: \"ba335bdb-560d-4051-8f28-89dab7f4f9cb\") " pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.765964 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:07 crc kubenswrapper[4747]: I0202 09:07:07.882309 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" event={"ID":"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49","Type":"ContainerStarted","Data":"30b627003f46948337beb09354365fdc51baf25b17b27fd28bc05a53a478ed56"} Feb 02 09:07:08 crc kubenswrapper[4747]: I0202 09:07:08.162842 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn"] Feb 02 09:07:08 crc kubenswrapper[4747]: W0202 09:07:08.170177 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba335bdb_560d_4051_8f28_89dab7f4f9cb.slice/crio-dda4b731bb211dd3ed7be245e45920633fdd141e1303008c148857c888cb7b12 WatchSource:0}: Error finding container dda4b731bb211dd3ed7be245e45920633fdd141e1303008c148857c888cb7b12: Status 404 returned error can't find the container with id dda4b731bb211dd3ed7be245e45920633fdd141e1303008c148857c888cb7b12 Feb 02 09:07:08 crc kubenswrapper[4747]: I0202 09:07:08.888503 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" event={"ID":"ba335bdb-560d-4051-8f28-89dab7f4f9cb","Type":"ContainerStarted","Data":"dda4b731bb211dd3ed7be245e45920633fdd141e1303008c148857c888cb7b12"} Feb 02 09:07:11 crc kubenswrapper[4747]: I0202 09:07:11.906356 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" event={"ID":"cf0b4df2-65a8-4cfa-a77f-a52634ce2b49","Type":"ContainerStarted","Data":"28f0a5fb4934a908e7103675e8f06d93861f073141de9b683cfed432d7a2cecb"} Feb 02 09:07:11 crc kubenswrapper[4747]: I0202 09:07:11.906950 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:11 crc kubenswrapper[4747]: I0202 09:07:11.935604 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" podStartSLOduration=1.917181909 podStartE2EDuration="4.935574991s" podCreationTimestamp="2026-02-02 09:07:07 +0000 UTC" firstStartedPulling="2026-02-02 09:07:07.756501143 +0000 UTC m=+640.300839576" lastFinishedPulling="2026-02-02 09:07:10.774894225 +0000 UTC m=+643.319232658" observedRunningTime="2026-02-02 09:07:11.935344635 +0000 UTC m=+644.479683078" watchObservedRunningTime="2026-02-02 09:07:11.935574991 +0000 UTC 
m=+644.479913414" Feb 02 09:07:12 crc kubenswrapper[4747]: I0202 09:07:12.912080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" event={"ID":"ba335bdb-560d-4051-8f28-89dab7f4f9cb","Type":"ContainerStarted","Data":"0ae4d334968bfaa5c85af9e40d1cc8d9a161cd1bf75c704c853115e78cd1c2d0"} Feb 02 09:07:12 crc kubenswrapper[4747]: I0202 09:07:12.934113 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" podStartSLOduration=1.404561505 podStartE2EDuration="5.934093651s" podCreationTimestamp="2026-02-02 09:07:07 +0000 UTC" firstStartedPulling="2026-02-02 09:07:08.173315731 +0000 UTC m=+640.717654204" lastFinishedPulling="2026-02-02 09:07:12.702847917 +0000 UTC m=+645.247186350" observedRunningTime="2026-02-02 09:07:12.930277416 +0000 UTC m=+645.474615839" watchObservedRunningTime="2026-02-02 09:07:12.934093651 +0000 UTC m=+645.478432084" Feb 02 09:07:13 crc kubenswrapper[4747]: I0202 09:07:13.916909 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:27 crc kubenswrapper[4747]: I0202 09:07:27.775068 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6d99b6746d-8rlqn" Feb 02 09:07:47 crc kubenswrapper[4747]: I0202 09:07:47.507362 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6bf46c8785-7g5mj" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.175191 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.176051 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.177951 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lbkn2" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.178151 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.185650 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-xrd4k"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.189985 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.192550 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.192785 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.194479 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.298073 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-dxs77"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.299415 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.302043 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.302281 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.302590 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-dmc26" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.302747 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.313266 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-mmkc8"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.314306 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.317500 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.333144 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-mmkc8"] Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-sockets\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342607 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342655 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-reloader\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342681 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j62sr\" (UniqueName: \"kubernetes.io/projected/4b0b5a28-658a-4507-a310-e983c5ef57db-kube-api-access-j62sr\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342742 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6vzh\" (UniqueName: \"kubernetes.io/projected/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-kube-api-access-v6vzh\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342806 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-startup\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-conf\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.342847 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j62sr\" (UniqueName: \"kubernetes.io/projected/4b0b5a28-658a-4507-a310-e983c5ef57db-kube-api-access-j62sr\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6vzh\" (UniqueName: \"kubernetes.io/projected/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-kube-api-access-v6vzh\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444458 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-metrics-certs\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444479 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rwbg\" (UniqueName: \"kubernetes.io/projected/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-kube-api-access-5rwbg\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444536 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-startup\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-conf\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444726 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444821 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-sockets\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444891 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metrics-certs\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.444927 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-conf\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445006 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metallb-excludel2\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445087 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445129 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5hx7\" (UniqueName: \"kubernetes.io/projected/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-kube-api-access-n5hx7\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445170 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445197 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 
09:07:48.445306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-reloader\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445321 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-sockets\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445359 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-cert\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445429 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.445597 4747 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445599 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-reloader\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.445632 4747 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.445650 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert podName:4b0b5a28-658a-4507-a310-e983c5ef57db nodeName:}" failed. No retries permitted until 2026-02-02 09:07:48.945634533 +0000 UTC m=+681.489972966 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert") pod "frr-k8s-webhook-server-7df86c4f6c-ms2gx" (UID: "4b0b5a28-658a-4507-a310-e983c5ef57db") : secret "frr-k8s-webhook-server-cert" not found Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.445737 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs podName:28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f nodeName:}" failed. No retries permitted until 2026-02-02 09:07:48.945713595 +0000 UTC m=+681.490052048 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs") pod "frr-k8s-xrd4k" (UID: "28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f") : secret "frr-k8s-certs-secret" not found Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.445849 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-frr-startup\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.472272 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j62sr\" (UniqueName: \"kubernetes.io/projected/4b0b5a28-658a-4507-a310-e983c5ef57db-kube-api-access-j62sr\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.474213 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6vzh\" (UniqueName: \"kubernetes.io/projected/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-kube-api-access-v6vzh\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547124 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metrics-certs\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547177 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547200 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metallb-excludel2\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5hx7\" (UniqueName: \"kubernetes.io/projected/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-kube-api-access-n5hx7\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547290 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-cert\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.547308 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-metrics-certs\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: E0202 09:07:48.547363 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist podName:ec6bb1cb-4fbe-477e-8635-42c2f087c3d4 nodeName:}" failed. No retries permitted until 2026-02-02 09:07:49.047347094 +0000 UTC m=+681.591685527 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist") pod "speaker-dxs77" (UID: "ec6bb1cb-4fbe-477e-8635-42c2f087c3d4") : secret "metallb-memberlist" not found Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.547379 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rwbg\" (UniqueName: \"kubernetes.io/projected/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-kube-api-access-5rwbg\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.548368 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metallb-excludel2\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.549104 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.550774 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-metrics-certs\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.554994 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-metrics-certs\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.560797 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-cert\") pod \"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.570439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5hx7\" (UniqueName: \"kubernetes.io/projected/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-kube-api-access-n5hx7\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.570742 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rwbg\" (UniqueName: \"kubernetes.io/projected/cae8a10b-ff3b-42ac-b7a3-326f049a49ba-kube-api-access-5rwbg\") pod 
\"controller-6968d8fdc4-mmkc8\" (UID: \"cae8a10b-ff3b-42ac-b7a3-326f049a49ba\") " pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.626343 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.952829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.952979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.956871 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4b0b5a28-658a-4507-a310-e983c5ef57db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-ms2gx\" (UID: \"4b0b5a28-658a-4507-a310-e983c5ef57db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:48 crc kubenswrapper[4747]: I0202 09:07:48.957354 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f-metrics-certs\") pod \"frr-k8s-xrd4k\" (UID: \"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f\") " pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.033962 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-mmkc8"] Feb 02 09:07:49 crc kubenswrapper[4747]: W0202 09:07:49.043924 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcae8a10b_ff3b_42ac_b7a3_326f049a49ba.slice/crio-bbd36270ab1871109a20d9b5900ac51f79f63702d3ee615b3d58c396e2b8e587 WatchSource:0}: Error finding container bbd36270ab1871109a20d9b5900ac51f79f63702d3ee615b3d58c396e2b8e587: Status 404 returned error can't find the container with id bbd36270ab1871109a20d9b5900ac51f79f63702d3ee615b3d58c396e2b8e587 Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.053982 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:49 crc kubenswrapper[4747]: E0202 09:07:49.054183 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 09:07:49 crc kubenswrapper[4747]: E0202 09:07:49.054258 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist podName:ec6bb1cb-4fbe-477e-8635-42c2f087c3d4 nodeName:}" failed. No retries permitted until 2026-02-02 09:07:50.054239557 +0000 UTC m=+682.598577990 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist") pod "speaker-dxs77" (UID: "ec6bb1cb-4fbe-477e-8635-42c2f087c3d4") : secret "metallb-memberlist" not found Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.092175 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.106946 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.119170 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-mmkc8" event={"ID":"cae8a10b-ff3b-42ac-b7a3-326f049a49ba","Type":"ContainerStarted","Data":"bbd36270ab1871109a20d9b5900ac51f79f63702d3ee615b3d58c396e2b8e587"} Feb 02 09:07:49 crc kubenswrapper[4747]: I0202 09:07:49.289915 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx"] Feb 02 09:07:49 crc kubenswrapper[4747]: W0202 09:07:49.297577 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b0b5a28_658a_4507_a310_e983c5ef57db.slice/crio-4786a9bc81b87612c4506b4dc237c40c6014f17548cd1421443f1bf1ae86c7a1 WatchSource:0}: Error finding container 4786a9bc81b87612c4506b4dc237c40c6014f17548cd1421443f1bf1ae86c7a1: Status 404 returned error can't find the container with id 4786a9bc81b87612c4506b4dc237c40c6014f17548cd1421443f1bf1ae86c7a1 Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.069116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.076627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/ec6bb1cb-4fbe-477e-8635-42c2f087c3d4-memberlist\") pod \"speaker-dxs77\" (UID: \"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4\") " pod="metallb-system/speaker-dxs77" Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.111828 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-dxs77" Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.124392 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" event={"ID":"4b0b5a28-658a-4507-a310-e983c5ef57db","Type":"ContainerStarted","Data":"4786a9bc81b87612c4506b4dc237c40c6014f17548cd1421443f1bf1ae86c7a1"} Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.125537 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"f56c5c794fd9ef36d8554f418746b5787ec580aaca0e5dba83ed8b4bdfeb97a9"} Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.127071 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-mmkc8" event={"ID":"cae8a10b-ff3b-42ac-b7a3-326f049a49ba","Type":"ContainerStarted","Data":"e1ee349b0c2b9c2b313211d532cf140d89b5db9b57144676054c74ae7c8b5670"} Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.127096 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-mmkc8" event={"ID":"cae8a10b-ff3b-42ac-b7a3-326f049a49ba","Type":"ContainerStarted","Data":"d93aefb331e2e2114319e7cfc371363059cf52011c28ecfa64b547d0b4a621cd"} Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.127243 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:07:50 crc kubenswrapper[4747]: I0202 09:07:50.145244 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-mmkc8" podStartSLOduration=2.145228616 podStartE2EDuration="2.145228616s" podCreationTimestamp="2026-02-02 09:07:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:07:50.145194905 +0000 UTC m=+682.689533358" watchObservedRunningTime="2026-02-02 09:07:50.145228616 +0000 UTC m=+682.689567049" Feb 02 09:07:51 crc kubenswrapper[4747]: I0202 09:07:51.134794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dxs77" event={"ID":"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4","Type":"ContainerStarted","Data":"623a780f015017016eae9911647f72aa986e1cb2e8eebe63d299597843dc06b5"} Feb 02 09:07:51 crc kubenswrapper[4747]: I0202 09:07:51.135191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dxs77" event={"ID":"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4","Type":"ContainerStarted","Data":"f1cf0ba3e23ecec7db6d1ad408dc49cc529e12fbab783e3f4eb2b38a478ad82c"} Feb 02 09:07:51 crc kubenswrapper[4747]: I0202 09:07:51.135204 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dxs77" event={"ID":"ec6bb1cb-4fbe-477e-8635-42c2f087c3d4","Type":"ContainerStarted","Data":"0f98cbe2676ab48eb0e58e5e0387f3247f74e64ccc5830894184afe8d305736f"} Feb 02 09:07:51 crc kubenswrapper[4747]: I0202 09:07:51.137071 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-dxs77" Feb 02 09:07:51 crc kubenswrapper[4747]: I0202 09:07:51.161293 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-dxs77" podStartSLOduration=3.161270518 podStartE2EDuration="3.161270518s" podCreationTimestamp="2026-02-02 09:07:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-02-02 09:07:51.156252164 +0000 UTC m=+683.700590597" watchObservedRunningTime="2026-02-02 09:07:51.161270518 +0000 UTC m=+683.705608951" Feb 02 09:07:58 crc kubenswrapper[4747]: I0202 09:07:58.218866 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" event={"ID":"4b0b5a28-658a-4507-a310-e983c5ef57db","Type":"ContainerStarted","Data":"0df429f17e1a5565685a30a3d46a26d5fd9a80ac25f9bda93a30f635b23c1868"} Feb 02 09:07:58 crc kubenswrapper[4747]: I0202 09:07:58.219358 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:07:58 crc kubenswrapper[4747]: I0202 09:07:58.222246 4747 generic.go:334] "Generic (PLEG): container finished" podID="28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f" containerID="8d52be2106f76cd1e92619ee929c14e9e8f7e1f705b5ab087b1dd9cc439f58d6" exitCode=0 Feb 02 09:07:58 crc kubenswrapper[4747]: I0202 09:07:58.222297 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerDied","Data":"8d52be2106f76cd1e92619ee929c14e9e8f7e1f705b5ab087b1dd9cc439f58d6"} Feb 02 09:07:58 crc kubenswrapper[4747]: I0202 09:07:58.244617 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" podStartSLOduration=1.606690859 podStartE2EDuration="10.244591492s" podCreationTimestamp="2026-02-02 09:07:48 +0000 UTC" firstStartedPulling="2026-02-02 09:07:49.301469304 +0000 UTC m=+681.845807737" lastFinishedPulling="2026-02-02 09:07:57.939369937 +0000 UTC m=+690.483708370" observedRunningTime="2026-02-02 09:07:58.238789578 +0000 UTC m=+690.783128021" watchObservedRunningTime="2026-02-02 09:07:58.244591492 +0000 UTC m=+690.788929915" Feb 02 09:07:59 crc kubenswrapper[4747]: I0202 09:07:59.232172 4747 generic.go:334] "Generic (PLEG): container finished" podID="28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f" containerID="89fd1efaf543f192035bc9dff4966d92ebb399269d908e53e7f9759fdaa1954a" exitCode=0 Feb 02 09:07:59 crc kubenswrapper[4747]: I0202 09:07:59.232318 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerDied","Data":"89fd1efaf543f192035bc9dff4966d92ebb399269d908e53e7f9759fdaa1954a"} Feb 02 09:08:00 crc kubenswrapper[4747]: I0202 09:08:00.123905 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-dxs77" Feb 02 09:08:00 crc kubenswrapper[4747]: I0202 09:08:00.252830 4747 generic.go:334] "Generic (PLEG): container finished" podID="28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f" containerID="e4d87d7b76c9a65f396b0135f31894bc67d0f7eb5298b6b896dfb80da5480da8" exitCode=0 Feb 02 09:08:00 crc kubenswrapper[4747]: I0202 09:08:00.252871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerDied","Data":"e4d87d7b76c9a65f396b0135f31894bc67d0f7eb5298b6b896dfb80da5480da8"} Feb 02 09:08:01 crc kubenswrapper[4747]: I0202 09:08:01.266581 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"7420add564300a618cb00cbce843152b295e003fcae9febc0d9d29945b017904"} Feb 02 09:08:01 crc kubenswrapper[4747]: I0202 09:08:01.266845 
4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"3a4f77e61a4f20a61fed68b01c06025c91447a957b9f8c100027938b191b6c0e"} Feb 02 09:08:01 crc kubenswrapper[4747]: I0202 09:08:01.266855 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"c20caf42548fd8a6ceb7a9d502deaf14848b69e67ce6ce3143946eaa517348a8"} Feb 02 09:08:01 crc kubenswrapper[4747]: I0202 09:08:01.266863 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"a5e89b44fb947937e2c1c4deacd42a01c2f89eb1ac51606d8b4970d715f49fa5"} Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.277645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"cc94529f169b9ca59cf67f2a706faaaf65664851cb1ceb34d12dda660e87eb27"} Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.277968 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-xrd4k" event={"ID":"28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f","Type":"ContainerStarted","Data":"d00194d3ef9466fbef7d9b0d5f7cbe14cff67dee8e33c8d0b0e4e95806f968f5"} Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.277983 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.300508 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-xrd4k" podStartSLOduration=5.563139446 podStartE2EDuration="14.300493255s" podCreationTimestamp="2026-02-02 09:07:48 +0000 UTC" firstStartedPulling="2026-02-02 09:07:49.221243175 +0000 UTC m=+681.765581618" lastFinishedPulling="2026-02-02 09:07:57.958596984 +0000 UTC m=+690.502935427" observedRunningTime="2026-02-02 09:08:02.298508455 +0000 UTC m=+694.842846898" watchObservedRunningTime="2026-02-02 09:08:02.300493255 +0000 UTC m=+694.844831688" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.742467 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.753249 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.759133 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.759428 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-xw4hv" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.759619 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.773754 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.863466 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsnrg\" (UniqueName: \"kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg\") pod \"openstack-operator-index-45h5k\" (UID: \"f1661a81-eff1-4f39-ba4e-847598c89c65\") " pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.965205 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsnrg\" (UniqueName: \"kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg\") pod \"openstack-operator-index-45h5k\" (UID: \"f1661a81-eff1-4f39-ba4e-847598c89c65\") " pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:02 crc kubenswrapper[4747]: I0202 09:08:02.987089 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsnrg\" (UniqueName: \"kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg\") pod \"openstack-operator-index-45h5k\" (UID: \"f1661a81-eff1-4f39-ba4e-847598c89c65\") " pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:03 crc kubenswrapper[4747]: I0202 09:08:03.084462 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:03 crc kubenswrapper[4747]: I0202 09:08:03.520224 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:03 crc kubenswrapper[4747]: W0202 09:08:03.521321 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1661a81_eff1_4f39_ba4e_847598c89c65.slice/crio-83fd973c90279b13a750e08f7d3929d7f52cf5502d30221c1c0529341f07b99c WatchSource:0}: Error finding container 83fd973c90279b13a750e08f7d3929d7f52cf5502d30221c1c0529341f07b99c: Status 404 returned error can't find the container with id 83fd973c90279b13a750e08f7d3929d7f52cf5502d30221c1c0529341f07b99c Feb 02 09:08:04 crc kubenswrapper[4747]: I0202 09:08:04.111189 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:08:04 crc kubenswrapper[4747]: I0202 09:08:04.158414 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:08:04 crc kubenswrapper[4747]: I0202 09:08:04.290017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-45h5k" event={"ID":"f1661a81-eff1-4f39-ba4e-847598c89c65","Type":"ContainerStarted","Data":"83fd973c90279b13a750e08f7d3929d7f52cf5502d30221c1c0529341f07b99c"} Feb 02 09:08:05 crc kubenswrapper[4747]: I0202 09:08:05.925080 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.307027 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-45h5k" event={"ID":"f1661a81-eff1-4f39-ba4e-847598c89c65","Type":"ContainerStarted","Data":"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94"} Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.307480 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-45h5k" podUID="f1661a81-eff1-4f39-ba4e-847598c89c65" containerName="registry-server" containerID="cri-o://0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94" gracePeriod=2 Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.537249 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-45h5k" podStartSLOduration=2.219437073 podStartE2EDuration="4.537231139s" podCreationTimestamp="2026-02-02 09:08:02 +0000 UTC" firstStartedPulling="2026-02-02 09:08:03.529067403 +0000 UTC m=+696.073405856" lastFinishedPulling="2026-02-02 09:08:05.846861489 +0000 UTC m=+698.391199922" observedRunningTime="2026-02-02 09:08:06.325329797 +0000 UTC m=+698.869668240" watchObservedRunningTime="2026-02-02 09:08:06.537231139 +0000 UTC m=+699.081569572" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.552710 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2k92r"] Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.553884 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.574018 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2k92r"] Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.630462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp5d2\" (UniqueName: \"kubernetes.io/projected/b2cdcde8-4fea-4642-954f-eb13afd581f7-kube-api-access-hp5d2\") pod \"openstack-operator-index-2k92r\" (UID: \"b2cdcde8-4fea-4642-954f-eb13afd581f7\") " pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.731572 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp5d2\" (UniqueName: \"kubernetes.io/projected/b2cdcde8-4fea-4642-954f-eb13afd581f7-kube-api-access-hp5d2\") pod \"openstack-operator-index-2k92r\" (UID: \"b2cdcde8-4fea-4642-954f-eb13afd581f7\") " pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.740958 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.753432 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp5d2\" (UniqueName: \"kubernetes.io/projected/b2cdcde8-4fea-4642-954f-eb13afd581f7-kube-api-access-hp5d2\") pod \"openstack-operator-index-2k92r\" (UID: \"b2cdcde8-4fea-4642-954f-eb13afd581f7\") " pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.832770 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsnrg\" (UniqueName: \"kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg\") pod \"f1661a81-eff1-4f39-ba4e-847598c89c65\" (UID: \"f1661a81-eff1-4f39-ba4e-847598c89c65\") " Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.837577 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg" (OuterVolumeSpecName: "kube-api-access-gsnrg") pod "f1661a81-eff1-4f39-ba4e-847598c89c65" (UID: "f1661a81-eff1-4f39-ba4e-847598c89c65"). InnerVolumeSpecName "kube-api-access-gsnrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.883698 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:06 crc kubenswrapper[4747]: I0202 09:08:06.934740 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsnrg\" (UniqueName: \"kubernetes.io/projected/f1661a81-eff1-4f39-ba4e-847598c89c65-kube-api-access-gsnrg\") on node \"crc\" DevicePath \"\"" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.092649 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2k92r"] Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.316900 4747 generic.go:334] "Generic (PLEG): container finished" podID="f1661a81-eff1-4f39-ba4e-847598c89c65" containerID="0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94" exitCode=0 Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.317272 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-45h5k" event={"ID":"f1661a81-eff1-4f39-ba4e-847598c89c65","Type":"ContainerDied","Data":"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94"} Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.317351 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-45h5k" event={"ID":"f1661a81-eff1-4f39-ba4e-847598c89c65","Type":"ContainerDied","Data":"83fd973c90279b13a750e08f7d3929d7f52cf5502d30221c1c0529341f07b99c"} Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.317371 4747 scope.go:117] "RemoveContainer" containerID="0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.317546 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-45h5k" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.319257 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2k92r" event={"ID":"b2cdcde8-4fea-4642-954f-eb13afd581f7","Type":"ContainerStarted","Data":"b20b0ddd39c96c4b85ff68c57e7db123f6dae8fc86ca9f46fc0a00aeb7c1e651"} Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.319280 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2k92r" event={"ID":"b2cdcde8-4fea-4642-954f-eb13afd581f7","Type":"ContainerStarted","Data":"31d48a37f9f7c54b749ccef07e2a7494d7d5b417bb278bb2f6e813b82aa78449"} Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.335123 4747 scope.go:117] "RemoveContainer" containerID="0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94" Feb 02 09:08:07 crc kubenswrapper[4747]: E0202 09:08:07.336360 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94\": container with ID starting with 0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94 not found: ID does not exist" containerID="0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.336403 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94"} err="failed to get container status \"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94\": rpc error: code = NotFound desc = could not find container 
\"0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94\": container with ID starting with 0df283e8a40e606ea37aaf9e3a887ecfdb13db694e4ad7d53d5f25bb1ea2da94 not found: ID does not exist" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.342425 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2k92r" podStartSLOduration=1.298120066 podStartE2EDuration="1.342408164s" podCreationTimestamp="2026-02-02 09:08:06 +0000 UTC" firstStartedPulling="2026-02-02 09:08:07.105126413 +0000 UTC m=+699.649464836" lastFinishedPulling="2026-02-02 09:08:07.149414501 +0000 UTC m=+699.693752934" observedRunningTime="2026-02-02 09:08:07.335734479 +0000 UTC m=+699.880072932" watchObservedRunningTime="2026-02-02 09:08:07.342408164 +0000 UTC m=+699.886746597" Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.350594 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:07 crc kubenswrapper[4747]: I0202 09:08:07.355463 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-45h5k"] Feb 02 09:08:08 crc kubenswrapper[4747]: I0202 09:08:08.351581 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1661a81-eff1-4f39-ba4e-847598c89c65" path="/var/lib/kubelet/pods/f1661a81-eff1-4f39-ba4e-847598c89c65/volumes" Feb 02 09:08:08 crc kubenswrapper[4747]: I0202 09:08:08.631434 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-mmkc8" Feb 02 09:08:09 crc kubenswrapper[4747]: I0202 09:08:09.100528 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-ms2gx" Feb 02 09:08:16 crc kubenswrapper[4747]: I0202 09:08:16.884567 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:16 crc kubenswrapper[4747]: I0202 09:08:16.885354 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:16 crc kubenswrapper[4747]: I0202 09:08:16.917826 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:17 crc kubenswrapper[4747]: I0202 09:08:17.407753 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-2k92r" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.111579 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-xrd4k" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.170805 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2"] Feb 02 09:08:19 crc kubenswrapper[4747]: E0202 09:08:19.171049 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1661a81-eff1-4f39-ba4e-847598c89c65" containerName="registry-server" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.171062 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1661a81-eff1-4f39-ba4e-847598c89c65" containerName="registry-server" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.171186 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1661a81-eff1-4f39-ba4e-847598c89c65" 
containerName="registry-server" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.171991 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.174151 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-9n59n" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.183137 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2"] Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.300045 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pscc2\" (UniqueName: \"kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.300137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.300197 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.401141 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pscc2\" (UniqueName: \"kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.401217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.401253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.401799 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.402311 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.448316 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pscc2\" (UniqueName: \"kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2\") pod \"e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.495209 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:19 crc kubenswrapper[4747]: I0202 09:08:19.947044 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2"] Feb 02 09:08:19 crc kubenswrapper[4747]: W0202 09:08:19.957119 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce418e47_dd96_4628_b622_6083b3955e7b.slice/crio-f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c WatchSource:0}: Error finding container f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c: Status 404 returned error can't find the container with id f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c Feb 02 09:08:20 crc kubenswrapper[4747]: I0202 09:08:20.401336 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce418e47-dd96-4628-b622-6083b3955e7b" containerID="30e56cca1145f0543c62be19e753e1cfd5c07f388086f5447353ce5b4f569b7e" exitCode=0 Feb 02 09:08:20 crc kubenswrapper[4747]: I0202 09:08:20.401397 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" event={"ID":"ce418e47-dd96-4628-b622-6083b3955e7b","Type":"ContainerDied","Data":"30e56cca1145f0543c62be19e753e1cfd5c07f388086f5447353ce5b4f569b7e"} Feb 02 09:08:20 crc kubenswrapper[4747]: I0202 09:08:20.401610 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" event={"ID":"ce418e47-dd96-4628-b622-6083b3955e7b","Type":"ContainerStarted","Data":"f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c"} Feb 02 09:08:21 crc kubenswrapper[4747]: I0202 09:08:21.410146 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce418e47-dd96-4628-b622-6083b3955e7b" containerID="7385ca51bd95a6bb4f3c8a1533ce7ec8044aabaf0026eb02ce912b802e9701bd" exitCode=0 Feb 02 09:08:21 crc kubenswrapper[4747]: I0202 
09:08:21.410274 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" event={"ID":"ce418e47-dd96-4628-b622-6083b3955e7b","Type":"ContainerDied","Data":"7385ca51bd95a6bb4f3c8a1533ce7ec8044aabaf0026eb02ce912b802e9701bd"} Feb 02 09:08:22 crc kubenswrapper[4747]: I0202 09:08:22.422020 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce418e47-dd96-4628-b622-6083b3955e7b" containerID="a74a9c44c704abecb161c9246a154b08599fb51f7c0a4575e9c9dce413f16e15" exitCode=0 Feb 02 09:08:22 crc kubenswrapper[4747]: I0202 09:08:22.422080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" event={"ID":"ce418e47-dd96-4628-b622-6083b3955e7b","Type":"ContainerDied","Data":"a74a9c44c704abecb161c9246a154b08599fb51f7c0a4575e9c9dce413f16e15"} Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.706574 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.876595 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util\") pod \"ce418e47-dd96-4628-b622-6083b3955e7b\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.876689 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pscc2\" (UniqueName: \"kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2\") pod \"ce418e47-dd96-4628-b622-6083b3955e7b\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.876708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle\") pod \"ce418e47-dd96-4628-b622-6083b3955e7b\" (UID: \"ce418e47-dd96-4628-b622-6083b3955e7b\") " Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.877676 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle" (OuterVolumeSpecName: "bundle") pod "ce418e47-dd96-4628-b622-6083b3955e7b" (UID: "ce418e47-dd96-4628-b622-6083b3955e7b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.881790 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2" (OuterVolumeSpecName: "kube-api-access-pscc2") pod "ce418e47-dd96-4628-b622-6083b3955e7b" (UID: "ce418e47-dd96-4628-b622-6083b3955e7b"). InnerVolumeSpecName "kube-api-access-pscc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.890665 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util" (OuterVolumeSpecName: "util") pod "ce418e47-dd96-4628-b622-6083b3955e7b" (UID: "ce418e47-dd96-4628-b622-6083b3955e7b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.978025 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pscc2\" (UniqueName: \"kubernetes.io/projected/ce418e47-dd96-4628-b622-6083b3955e7b-kube-api-access-pscc2\") on node \"crc\" DevicePath \"\"" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.978060 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:08:23 crc kubenswrapper[4747]: I0202 09:08:23.978071 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ce418e47-dd96-4628-b622-6083b3955e7b-util\") on node \"crc\" DevicePath \"\"" Feb 02 09:08:24 crc kubenswrapper[4747]: I0202 09:08:24.440442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" event={"ID":"ce418e47-dd96-4628-b622-6083b3955e7b","Type":"ContainerDied","Data":"f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c"} Feb 02 09:08:24 crc kubenswrapper[4747]: I0202 09:08:24.440695 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0ed6b2415b0dc1f16aa70cf4f3011f7d199d60a510c9b735ae8aedbd958f07c" Feb 02 09:08:24 crc kubenswrapper[4747]: I0202 09:08:24.440556 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.403370 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc"] Feb 02 09:08:32 crc kubenswrapper[4747]: E0202 09:08:32.404239 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="util" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.404259 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="util" Feb 02 09:08:32 crc kubenswrapper[4747]: E0202 09:08:32.404274 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="pull" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.404283 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="pull" Feb 02 09:08:32 crc kubenswrapper[4747]: E0202 09:08:32.404300 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="extract" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.404311 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="extract" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.404498 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce418e47-dd96-4628-b622-6083b3955e7b" containerName="extract" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.405147 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.408274 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-9zrrg" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.428327 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc"] Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.493585 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gqtv\" (UniqueName: \"kubernetes.io/projected/8ced9c16-9a58-4416-aadc-23d33fbd8c2c-kube-api-access-6gqtv\") pod \"openstack-operator-controller-init-5b57c84fd5-pljbc\" (UID: \"8ced9c16-9a58-4416-aadc-23d33fbd8c2c\") " pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.594902 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gqtv\" (UniqueName: \"kubernetes.io/projected/8ced9c16-9a58-4416-aadc-23d33fbd8c2c-kube-api-access-6gqtv\") pod \"openstack-operator-controller-init-5b57c84fd5-pljbc\" (UID: \"8ced9c16-9a58-4416-aadc-23d33fbd8c2c\") " pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.634568 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gqtv\" (UniqueName: \"kubernetes.io/projected/8ced9c16-9a58-4416-aadc-23d33fbd8c2c-kube-api-access-6gqtv\") pod \"openstack-operator-controller-init-5b57c84fd5-pljbc\" (UID: \"8ced9c16-9a58-4416-aadc-23d33fbd8c2c\") " pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:32 crc kubenswrapper[4747]: I0202 09:08:32.730782 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:33 crc kubenswrapper[4747]: I0202 09:08:33.140642 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc"] Feb 02 09:08:33 crc kubenswrapper[4747]: I0202 09:08:33.503639 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" event={"ID":"8ced9c16-9a58-4416-aadc-23d33fbd8c2c","Type":"ContainerStarted","Data":"bec4db9f15837af189fe069bd2c3b0ed5e2beb97003104fe799cb249ffbc0e6e"} Feb 02 09:08:37 crc kubenswrapper[4747]: I0202 09:08:37.527815 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" event={"ID":"8ced9c16-9a58-4416-aadc-23d33fbd8c2c","Type":"ContainerStarted","Data":"1b74060c861e422fa6b75e7ac94c5e95ac4d32d42f993f765e358d5623cc8d0c"} Feb 02 09:08:37 crc kubenswrapper[4747]: I0202 09:08:37.528342 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:37 crc kubenswrapper[4747]: I0202 09:08:37.569853 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" podStartSLOduration=2.112175596 podStartE2EDuration="5.569824052s" podCreationTimestamp="2026-02-02 09:08:32 +0000 UTC" firstStartedPulling="2026-02-02 09:08:33.150189735 +0000 UTC m=+725.694528178" lastFinishedPulling="2026-02-02 09:08:36.607838201 +0000 UTC m=+729.152176634" observedRunningTime="2026-02-02 09:08:37.563741674 +0000 UTC m=+730.108080107" watchObservedRunningTime="2026-02-02 09:08:37.569824052 +0000 UTC m=+730.114162495" Feb 02 09:08:42 crc kubenswrapper[4747]: I0202 09:08:42.733698 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-5b57c84fd5-pljbc" Feb 02 09:08:50 crc kubenswrapper[4747]: I0202 09:08:50.518721 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:08:50 crc kubenswrapper[4747]: I0202 09:08:50.519366 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.192482 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.194154 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.196584 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ndfbl" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.197886 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.198783 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.201928 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-r2fj4" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.203719 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.210250 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.223119 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.224159 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.234273 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-mzktx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.239839 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.240650 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.244474 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-hf244" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.249354 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.256705 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.280716 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.281433 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.287559 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.287875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-8r2gb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.288281 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.289450 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-l52bg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.315495 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.316467 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.321231 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt9q2\" (UniqueName: \"kubernetes.io/projected/db5ae332-d8f3-41a2-9c21-45ff5536cbb8-kube-api-access-dt9q2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-zqj92\" (UID: \"db5ae332-d8f3-41a2-9c21-45ff5536cbb8\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.321299 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kg5r\" (UniqueName: \"kubernetes.io/projected/4923ca42-a876-42cd-b992-21573dde4361-kube-api-access-8kg5r\") pod \"cinder-operator-controller-manager-8d874c8fc-7kw8z\" (UID: \"4923ca42-a876-42cd-b992-21573dde4361\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.321499 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrwzg\" (UniqueName: \"kubernetes.io/projected/38cdc2d7-bf0e-499c-9953-5f3088714675-kube-api-access-wrwzg\") pod \"heat-operator-controller-manager-69d6db494d-hqm5n\" (UID: \"38cdc2d7-bf0e-499c-9953-5f3088714675\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.321623 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgqbb\" (UniqueName: \"kubernetes.io/projected/da38d39a-f91a-42d4-a773-5fd894e74305-kube-api-access-tgqbb\") pod \"designate-operator-controller-manager-6d9697b7f4-gg5lg\" (UID: \"da38d39a-f91a-42d4-a773-5fd894e74305\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.322384 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-2n9xt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.322410 4747 reflector.go:368] Caches populated 
for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.332449 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.338891 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.357464 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.358115 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.366661 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-lllgt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.373663 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.396718 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.418021 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.421468 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426009 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4j4z\" (UniqueName: \"kubernetes.io/projected/9069468d-21ec-4ca1-8c03-e35555180a9a-kube-api-access-m4j4z\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426098 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vppm6\" (UniqueName: \"kubernetes.io/projected/c01d1e85-b676-404c-8565-900de1d7b9ff-kube-api-access-vppm6\") pod \"horizon-operator-controller-manager-5fb775575f-nmsdj\" (UID: \"c01d1e85-b676-404c-8565-900de1d7b9ff\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt9q2\" (UniqueName: \"kubernetes.io/projected/db5ae332-d8f3-41a2-9c21-45ff5536cbb8-kube-api-access-dt9q2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-zqj92\" (UID: \"db5ae332-d8f3-41a2-9c21-45ff5536cbb8\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426168 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kg5r\" (UniqueName: \"kubernetes.io/projected/4923ca42-a876-42cd-b992-21573dde4361-kube-api-access-8kg5r\") pod \"cinder-operator-controller-manager-8d874c8fc-7kw8z\" (UID: \"4923ca42-a876-42cd-b992-21573dde4361\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgzwc\" (UniqueName: \"kubernetes.io/projected/d2a8dd02-a258-40be-ae2b-8c4d8f093870-kube-api-access-lgzwc\") pod \"glance-operator-controller-manager-8886f4c47-g5qsx\" (UID: \"d2a8dd02-a258-40be-ae2b-8c4d8f093870\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrwzg\" (UniqueName: \"kubernetes.io/projected/38cdc2d7-bf0e-499c-9953-5f3088714675-kube-api-access-wrwzg\") pod \"heat-operator-controller-manager-69d6db494d-hqm5n\" (UID: \"38cdc2d7-bf0e-499c-9953-5f3088714675\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.426335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgqbb\" (UniqueName: \"kubernetes.io/projected/da38d39a-f91a-42d4-a773-5fd894e74305-kube-api-access-tgqbb\") pod \"designate-operator-controller-manager-6d9697b7f4-gg5lg\" (UID: \"da38d39a-f91a-42d4-a773-5fd894e74305\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.430829 4747 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-ddft8" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.431031 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.459349 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt9q2\" (UniqueName: \"kubernetes.io/projected/db5ae332-d8f3-41a2-9c21-45ff5536cbb8-kube-api-access-dt9q2\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-zqj92\" (UID: \"db5ae332-d8f3-41a2-9c21-45ff5536cbb8\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.459584 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgqbb\" (UniqueName: \"kubernetes.io/projected/da38d39a-f91a-42d4-a773-5fd894e74305-kube-api-access-tgqbb\") pod \"designate-operator-controller-manager-6d9697b7f4-gg5lg\" (UID: \"da38d39a-f91a-42d4-a773-5fd894e74305\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.470608 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrwzg\" (UniqueName: \"kubernetes.io/projected/38cdc2d7-bf0e-499c-9953-5f3088714675-kube-api-access-wrwzg\") pod \"heat-operator-controller-manager-69d6db494d-hqm5n\" (UID: \"38cdc2d7-bf0e-499c-9953-5f3088714675\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.470691 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.471708 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.480804 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-5brrf" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.482608 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kg5r\" (UniqueName: \"kubernetes.io/projected/4923ca42-a876-42cd-b992-21573dde4361-kube-api-access-8kg5r\") pod \"cinder-operator-controller-manager-8d874c8fc-7kw8z\" (UID: \"4923ca42-a876-42cd-b992-21573dde4361\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.493299 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.505961 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.507021 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.510988 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-kj677" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.526285 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.531857 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.533867 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9nbx\" (UniqueName: \"kubernetes.io/projected/1c1d1d29-d3e3-4569-a8ef-f68a5dee0242-kube-api-access-j9nbx\") pod \"keystone-operator-controller-manager-84f48565d4-2rmn5\" (UID: \"1c1d1d29-d3e3-4569-a8ef-f68a5dee0242\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.533912 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vppm6\" (UniqueName: \"kubernetes.io/projected/c01d1e85-b676-404c-8565-900de1d7b9ff-kube-api-access-vppm6\") pod \"horizon-operator-controller-manager-5fb775575f-nmsdj\" (UID: \"c01d1e85-b676-404c-8565-900de1d7b9ff\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.533957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgzwc\" (UniqueName: \"kubernetes.io/projected/d2a8dd02-a258-40be-ae2b-8c4d8f093870-kube-api-access-lgzwc\") pod \"glance-operator-controller-manager-8886f4c47-g5qsx\" (UID: \"d2a8dd02-a258-40be-ae2b-8c4d8f093870\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.533982 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns8md\" (UniqueName: \"kubernetes.io/projected/e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3-kube-api-access-ns8md\") pod \"mariadb-operator-controller-manager-67bf948998-6fx7k\" (UID: \"e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.534036 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.534060 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h5cp\" (UniqueName: \"kubernetes.io/projected/2343f1c6-2d43-468e-a2ea-7e6b2b915cec-kube-api-access-7h5cp\") pod \"manila-operator-controller-manager-7dd968899f-lqfh2\" (UID: \"2343f1c6-2d43-468e-a2ea-7e6b2b915cec\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 
09:09:02.534080 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xl47\" (UniqueName: \"kubernetes.io/projected/e7700f72-6bbd-4ba3-9835-3665f7b1df89-kube-api-access-5xl47\") pod \"ironic-operator-controller-manager-5f4b8bd54d-rtv8v\" (UID: \"e7700f72-6bbd-4ba3-9835-3665f7b1df89\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.534109 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4j4z\" (UniqueName: \"kubernetes.io/projected/9069468d-21ec-4ca1-8c03-e35555180a9a-kube-api-access-m4j4z\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: E0202 09:09:02.535795 4747 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:02 crc kubenswrapper[4747]: E0202 09:09:02.535844 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert podName:9069468d-21ec-4ca1-8c03-e35555180a9a nodeName:}" failed. No retries permitted until 2026-02-02 09:09:03.03582877 +0000 UTC m=+755.580167203 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert") pod "infra-operator-controller-manager-79955696d6-qlc6w" (UID: "9069468d-21ec-4ca1-8c03-e35555180a9a") : secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.539810 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.540868 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.541797 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.548412 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-5mp6g" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.562490 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.566294 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vppm6\" (UniqueName: \"kubernetes.io/projected/c01d1e85-b676-404c-8565-900de1d7b9ff-kube-api-access-vppm6\") pod \"horizon-operator-controller-manager-5fb775575f-nmsdj\" (UID: \"c01d1e85-b676-404c-8565-900de1d7b9ff\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.577158 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4j4z\" (UniqueName: \"kubernetes.io/projected/9069468d-21ec-4ca1-8c03-e35555180a9a-kube-api-access-m4j4z\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.577353 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgzwc\" (UniqueName: \"kubernetes.io/projected/d2a8dd02-a258-40be-ae2b-8c4d8f093870-kube-api-access-lgzwc\") pod \"glance-operator-controller-manager-8886f4c47-g5qsx\" (UID: \"d2a8dd02-a258-40be-ae2b-8c4d8f093870\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.586064 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.586872 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.588281 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-cnzkw" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.590018 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.598909 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.606857 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.612086 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.616720 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.617666 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.618353 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.619660 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-q8qgv" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.632355 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635731 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xl47\" (UniqueName: \"kubernetes.io/projected/e7700f72-6bbd-4ba3-9835-3665f7b1df89-kube-api-access-5xl47\") pod \"ironic-operator-controller-manager-5f4b8bd54d-rtv8v\" (UID: \"e7700f72-6bbd-4ba3-9835-3665f7b1df89\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635765 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jskw9\" (UniqueName: \"kubernetes.io/projected/200b1a45-bbca-460b-a578-2c913f0075f9-kube-api-access-jskw9\") pod \"neutron-operator-controller-manager-585dbc889-66cdn\" (UID: \"200b1a45-bbca-460b-a578-2c913f0075f9\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635810 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp2jg\" (UniqueName: \"kubernetes.io/projected/2def646c-f3ea-46d4-9003-ea05abd176d7-kube-api-access-mp2jg\") pod \"octavia-operator-controller-manager-6687f8d877-ccljn\" (UID: \"2def646c-f3ea-46d4-9003-ea05abd176d7\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635845 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9nbx\" (UniqueName: \"kubernetes.io/projected/1c1d1d29-d3e3-4569-a8ef-f68a5dee0242-kube-api-access-j9nbx\") pod \"keystone-operator-controller-manager-84f48565d4-2rmn5\" (UID: \"1c1d1d29-d3e3-4569-a8ef-f68a5dee0242\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635898 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns8md\" (UniqueName: \"kubernetes.io/projected/e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3-kube-api-access-ns8md\") pod \"mariadb-operator-controller-manager-67bf948998-6fx7k\" (UID: \"e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.635922 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwt5b\" (UniqueName: \"kubernetes.io/projected/2cddc38a-7d58-47ec-a296-b0447a8b67c4-kube-api-access-zwt5b\") pod \"nova-operator-controller-manager-55bff696bd-hhlsd\" (UID: \"2cddc38a-7d58-47ec-a296-b0447a8b67c4\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.636030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h5cp\" (UniqueName: 
\"kubernetes.io/projected/2343f1c6-2d43-468e-a2ea-7e6b2b915cec-kube-api-access-7h5cp\") pod \"manila-operator-controller-manager-7dd968899f-lqfh2\" (UID: \"2343f1c6-2d43-468e-a2ea-7e6b2b915cec\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.641132 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.642061 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.643794 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.645402 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ht7pb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.652599 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xl47\" (UniqueName: \"kubernetes.io/projected/e7700f72-6bbd-4ba3-9835-3665f7b1df89-kube-api-access-5xl47\") pod \"ironic-operator-controller-manager-5f4b8bd54d-rtv8v\" (UID: \"e7700f72-6bbd-4ba3-9835-3665f7b1df89\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.661074 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h5cp\" (UniqueName: \"kubernetes.io/projected/2343f1c6-2d43-468e-a2ea-7e6b2b915cec-kube-api-access-7h5cp\") pod \"manila-operator-controller-manager-7dd968899f-lqfh2\" (UID: \"2343f1c6-2d43-468e-a2ea-7e6b2b915cec\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.661464 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.661468 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9nbx\" (UniqueName: \"kubernetes.io/projected/1c1d1d29-d3e3-4569-a8ef-f68a5dee0242-kube-api-access-j9nbx\") pod \"keystone-operator-controller-manager-84f48565d4-2rmn5\" (UID: \"1c1d1d29-d3e3-4569-a8ef-f68a5dee0242\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.662239 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.665992 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns8md\" (UniqueName: \"kubernetes.io/projected/e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3-kube-api-access-ns8md\") pod \"mariadb-operator-controller-manager-67bf948998-6fx7k\" (UID: \"e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.666219 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-7r29p" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.669978 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.670819 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.683840 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-w7xjs" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.684053 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.688222 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.704831 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.717616 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.723771 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.724759 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.727275 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-jqq7p" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.729528 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jskw9\" (UniqueName: \"kubernetes.io/projected/200b1a45-bbca-460b-a578-2c913f0075f9-kube-api-access-jskw9\") pod \"neutron-operator-controller-manager-585dbc889-66cdn\" (UID: \"200b1a45-bbca-460b-a578-2c913f0075f9\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737056 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp2jg\" (UniqueName: \"kubernetes.io/projected/2def646c-f3ea-46d4-9003-ea05abd176d7-kube-api-access-mp2jg\") pod \"octavia-operator-controller-manager-6687f8d877-ccljn\" (UID: \"2def646c-f3ea-46d4-9003-ea05abd176d7\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737091 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42zzs\" (UniqueName: \"kubernetes.io/projected/409bac45-bf99-4268-aa3c-19e9d7392932-kube-api-access-42zzs\") pod \"swift-operator-controller-manager-68fc8c869-np7nt\" (UID: \"409bac45-bf99-4268-aa3c-19e9d7392932\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737119 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737157 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nfvv\" (UniqueName: \"kubernetes.io/projected/cbb834cd-ddc7-462f-8b67-d8466ce5f53e-kube-api-access-5nfvv\") pod \"ovn-operator-controller-manager-788c46999f-5gqtj\" (UID: \"cbb834cd-ddc7-462f-8b67-d8466ce5f53e\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz6pq\" (UniqueName: \"kubernetes.io/projected/b9cdd96d-49df-489b-9e07-0529338f4b78-kube-api-access-pz6pq\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737215 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj9pc\" (UniqueName: 
\"kubernetes.io/projected/b90291c1-9921-431d-b77d-4196cb5219df-kube-api-access-hj9pc\") pod \"placement-operator-controller-manager-5b964cf4cd-fx5wk\" (UID: \"b90291c1-9921-431d-b77d-4196cb5219df\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.737253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwt5b\" (UniqueName: \"kubernetes.io/projected/2cddc38a-7d58-47ec-a296-b0447a8b67c4-kube-api-access-zwt5b\") pod \"nova-operator-controller-manager-55bff696bd-hhlsd\" (UID: \"2cddc38a-7d58-47ec-a296-b0447a8b67c4\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.743429 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.744194 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.747506 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.751043 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-kd4vc" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.761027 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.767250 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwt5b\" (UniqueName: \"kubernetes.io/projected/2cddc38a-7d58-47ec-a296-b0447a8b67c4-kube-api-access-zwt5b\") pod \"nova-operator-controller-manager-55bff696bd-hhlsd\" (UID: \"2cddc38a-7d58-47ec-a296-b0447a8b67c4\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.768532 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp2jg\" (UniqueName: \"kubernetes.io/projected/2def646c-f3ea-46d4-9003-ea05abd176d7-kube-api-access-mp2jg\") pod \"octavia-operator-controller-manager-6687f8d877-ccljn\" (UID: \"2def646c-f3ea-46d4-9003-ea05abd176d7\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.772682 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jskw9\" (UniqueName: \"kubernetes.io/projected/200b1a45-bbca-460b-a578-2c913f0075f9-kube-api-access-jskw9\") pod \"neutron-operator-controller-manager-585dbc889-66cdn\" (UID: \"200b1a45-bbca-460b-a578-2c913f0075f9\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.785439 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.786287 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.789609 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7d9bl" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42zzs\" (UniqueName: \"kubernetes.io/projected/409bac45-bf99-4268-aa3c-19e9d7392932-kube-api-access-42zzs\") pod \"swift-operator-controller-manager-68fc8c869-np7nt\" (UID: \"409bac45-bf99-4268-aa3c-19e9d7392932\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838693 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838742 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nfvv\" (UniqueName: \"kubernetes.io/projected/cbb834cd-ddc7-462f-8b67-d8466ce5f53e-kube-api-access-5nfvv\") pod \"ovn-operator-controller-manager-788c46999f-5gqtj\" (UID: \"cbb834cd-ddc7-462f-8b67-d8466ce5f53e\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838809 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz6pq\" (UniqueName: \"kubernetes.io/projected/b9cdd96d-49df-489b-9e07-0529338f4b78-kube-api-access-pz6pq\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838850 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj9pc\" (UniqueName: \"kubernetes.io/projected/b90291c1-9921-431d-b77d-4196cb5219df-kube-api-access-hj9pc\") pod \"placement-operator-controller-manager-5b964cf4cd-fx5wk\" (UID: \"b90291c1-9921-431d-b77d-4196cb5219df\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.838963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2chtr\" (UniqueName: \"kubernetes.io/projected/cfd44a3c-5745-48af-b70c-86402a61492e-kube-api-access-2chtr\") pod \"test-operator-controller-manager-56f8bfcd9f-qf4mt\" (UID: \"cfd44a3c-5745-48af-b70c-86402a61492e\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.839050 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvh9p\" (UniqueName: \"kubernetes.io/projected/ebea840f-6b28-4ed1-8483-084f00350673-kube-api-access-fvh9p\") pod \"telemetry-operator-controller-manager-64b5b76f97-sp2rh\" (UID: \"ebea840f-6b28-4ed1-8483-084f00350673\") " 
pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:02 crc kubenswrapper[4747]: E0202 09:09:02.840443 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:02 crc kubenswrapper[4747]: E0202 09:09:02.840581 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert podName:b9cdd96d-49df-489b-9e07-0529338f4b78 nodeName:}" failed. No retries permitted until 2026-02-02 09:09:03.340553424 +0000 UTC m=+755.884891857 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" (UID: "b9cdd96d-49df-489b-9e07-0529338f4b78") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.850248 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.860059 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.860219 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz6pq\" (UniqueName: \"kubernetes.io/projected/b9cdd96d-49df-489b-9e07-0529338f4b78-kube-api-access-pz6pq\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.860956 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42zzs\" (UniqueName: \"kubernetes.io/projected/409bac45-bf99-4268-aa3c-19e9d7392932-kube-api-access-42zzs\") pod \"swift-operator-controller-manager-68fc8c869-np7nt\" (UID: \"409bac45-bf99-4268-aa3c-19e9d7392932\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.862228 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.874413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj9pc\" (UniqueName: \"kubernetes.io/projected/b90291c1-9921-431d-b77d-4196cb5219df-kube-api-access-hj9pc\") pod \"placement-operator-controller-manager-5b964cf4cd-fx5wk\" (UID: \"b90291c1-9921-431d-b77d-4196cb5219df\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.879053 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nfvv\" (UniqueName: \"kubernetes.io/projected/cbb834cd-ddc7-462f-8b67-d8466ce5f53e-kube-api-access-5nfvv\") pod \"ovn-operator-controller-manager-788c46999f-5gqtj\" (UID: \"cbb834cd-ddc7-462f-8b67-d8466ce5f53e\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.888312 4747 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-qpfgd"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.889595 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.906011 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-v296g" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.911352 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-qpfgd"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.919212 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.943273 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.943462 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.944304 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.944506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2chtr\" (UniqueName: \"kubernetes.io/projected/cfd44a3c-5745-48af-b70c-86402a61492e-kube-api-access-2chtr\") pod \"test-operator-controller-manager-56f8bfcd9f-qf4mt\" (UID: \"cfd44a3c-5745-48af-b70c-86402a61492e\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.944563 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6r57\" (UniqueName: \"kubernetes.io/projected/0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae-kube-api-access-k6r57\") pod \"watcher-operator-controller-manager-564965969-qpfgd\" (UID: \"0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.944584 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvh9p\" (UniqueName: \"kubernetes.io/projected/ebea840f-6b28-4ed1-8483-084f00350673-kube-api-access-fvh9p\") pod \"telemetry-operator-controller-manager-64b5b76f97-sp2rh\" (UID: \"ebea840f-6b28-4ed1-8483-084f00350673\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.947352 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.947557 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.947512 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-26jzj" Feb 02 09:09:02 crc 
kubenswrapper[4747]: I0202 09:09:02.952147 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.956546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt"] Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.978356 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvh9p\" (UniqueName: \"kubernetes.io/projected/ebea840f-6b28-4ed1-8483-084f00350673-kube-api-access-fvh9p\") pod \"telemetry-operator-controller-manager-64b5b76f97-sp2rh\" (UID: \"ebea840f-6b28-4ed1-8483-084f00350673\") " pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:02 crc kubenswrapper[4747]: I0202 09:09:02.978782 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.003031 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2chtr\" (UniqueName: \"kubernetes.io/projected/cfd44a3c-5745-48af-b70c-86402a61492e-kube-api-access-2chtr\") pod \"test-operator-controller-manager-56f8bfcd9f-qf4mt\" (UID: \"cfd44a3c-5745-48af-b70c-86402a61492e\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.026420 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.037525 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.038969 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.040943 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.043051 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-g4pjn" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.045570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.045641 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.045899 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.045981 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxq2g\" (UniqueName: \"kubernetes.io/projected/c425595e-72b6-48f6-91fc-0469ac7a634e-kube-api-access-vxq2g\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jrklv\" (UID: \"c425595e-72b6-48f6-91fc-0469ac7a634e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.046009 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6r57\" (UniqueName: \"kubernetes.io/projected/0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae-kube-api-access-k6r57\") pod \"watcher-operator-controller-manager-564965969-qpfgd\" (UID: \"0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.046077 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf9g7\" (UniqueName: \"kubernetes.io/projected/1c82e754-744c-49e4-9ec9-3d8dada42adf-kube-api-access-zf9g7\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.046307 4747 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.046386 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert podName:9069468d-21ec-4ca1-8c03-e35555180a9a nodeName:}" 
failed. No retries permitted until 2026-02-02 09:09:04.046344158 +0000 UTC m=+756.590682591 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert") pod "infra-operator-controller-manager-79955696d6-qlc6w" (UID: "9069468d-21ec-4ca1-8c03-e35555180a9a") : secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.050565 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.050858 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.071101 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.085540 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6r57\" (UniqueName: \"kubernetes.io/projected/0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae-kube-api-access-k6r57\") pod \"watcher-operator-controller-manager-564965969-qpfgd\" (UID: \"0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.150344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.150463 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.150543 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxq2g\" (UniqueName: \"kubernetes.io/projected/c425595e-72b6-48f6-91fc-0469ac7a634e-kube-api-access-vxq2g\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jrklv\" (UID: \"c425595e-72b6-48f6-91fc-0469ac7a634e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.150602 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf9g7\" (UniqueName: \"kubernetes.io/projected/1c82e754-744c-49e4-9ec9-3d8dada42adf-kube-api-access-zf9g7\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.150747 4747 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.150816 4747 
secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.150836 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:03.650813713 +0000 UTC m=+756.195152196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "metrics-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.150926 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:03.650903906 +0000 UTC m=+756.195242339 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.163653 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.167062 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxq2g\" (UniqueName: \"kubernetes.io/projected/c425595e-72b6-48f6-91fc-0469ac7a634e-kube-api-access-vxq2g\") pod \"rabbitmq-cluster-operator-manager-668c99d594-jrklv\" (UID: \"c425595e-72b6-48f6-91fc-0469ac7a634e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.167206 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf9g7\" (UniqueName: \"kubernetes.io/projected/1c82e754-744c-49e4-9ec9-3d8dada42adf-kube-api-access-zf9g7\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.217725 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.232970 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.370577 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.370735 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.370791 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert podName:b9cdd96d-49df-489b-9e07-0529338f4b78 nodeName:}" failed. No retries permitted until 2026-02-02 09:09:04.370773744 +0000 UTC m=+756.915112177 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" (UID: "b9cdd96d-49df-489b-9e07-0529338f4b78") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.409416 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.409439 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.423502 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.447158 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.596994 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.610925 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.682112 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.682238 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.682383 4747 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.682440 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:04.682422827 +0000 UTC m=+757.226761260 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "metrics-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.689510 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: E0202 09:09:03.689608 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:04.689584252 +0000 UTC m=+757.233922685 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.695625 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" event={"ID":"e7700f72-6bbd-4ba3-9835-3665f7b1df89","Type":"ContainerStarted","Data":"f746372b7b5d47e09299f4ff7bfc2e4a1dc6d5dd3f9bced300dcfa71a76f76bc"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.696595 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" event={"ID":"da38d39a-f91a-42d4-a773-5fd894e74305","Type":"ContainerStarted","Data":"7c65ce2619637b83ec43670e4c3719acbfd701b590b6dee6b79bbc80dfc94966"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.697237 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" event={"ID":"4923ca42-a876-42cd-b992-21573dde4361","Type":"ContainerStarted","Data":"0d4fbb7b76f470ca290ef8ece094e98de9842ed6350b330531bf17d6897a765e"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.698283 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" event={"ID":"38cdc2d7-bf0e-499c-9953-5f3088714675","Type":"ContainerStarted","Data":"ef4fc5acb26cd104aac3069f34098d36a5bb024edfdf9cb2a80ccb5581bc13de"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.714403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" event={"ID":"1c1d1d29-d3e3-4569-a8ef-f68a5dee0242","Type":"ContainerStarted","Data":"7d5f7d050b5fe21ff758a53ec8d431a44d36329d2214569a444e6abc9e1f2f49"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.715263 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" event={"ID":"db5ae332-d8f3-41a2-9c21-45ff5536cbb8","Type":"ContainerStarted","Data":"9dc70bd75f922dc0f4f4d503356633429b0349b865a37076c1524fee67da11ff"} Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.867740 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.905874 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2"] Feb 02 09:09:03 crc kubenswrapper[4747]: I0202 09:09:03.913809 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.036243 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k"] Feb 02 09:09:04 crc kubenswrapper[4747]: W0202 09:09:04.039898 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7c8163b_0e29_4cdc_a5d0_5d139e47f1e3.slice/crio-29beaa76a6618e9c270c7ed623563f04ac1855b087f0ece5a30015c91db60fea WatchSource:0}: Error finding container 
29beaa76a6618e9c270c7ed623563f04ac1855b087f0ece5a30015c91db60fea: Status 404 returned error can't find the container with id 29beaa76a6618e9c270c7ed623563f04ac1855b087f0ece5a30015c91db60fea Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.091251 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.092256 4747 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.092304 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert podName:9069468d-21ec-4ca1-8c03-e35555180a9a nodeName:}" failed. No retries permitted until 2026-02-02 09:09:06.092290713 +0000 UTC m=+758.636629146 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert") pod "infra-operator-controller-manager-79955696d6-qlc6w" (UID: "9069468d-21ec-4ca1-8c03-e35555180a9a") : secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.205728 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh"] Feb 02 09:09:04 crc kubenswrapper[4747]: W0202 09:09:04.210193 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb90291c1_9921_431d_b77d_4196cb5219df.slice/crio-ad860b0b885ef04cb149b4db55a147d6f864231e70cc63577ee691c4bc5f5e72 WatchSource:0}: Error finding container ad860b0b885ef04cb149b4db55a147d6f864231e70cc63577ee691c4bc5f5e72: Status 404 returned error can't find the container with id ad860b0b885ef04cb149b4db55a147d6f864231e70cc63577ee691c4bc5f5e72 Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.220733 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.241040 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.247742 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.258589 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.276286 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt"] Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.290486 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zwt5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-hhlsd_openstack-operators(2cddc38a-7d58-47ec-a296-b0447a8b67c4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.290588 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2chtr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-qf4mt_openstack-operators(cfd44a3c-5745-48af-b70c-86402a61492e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.290667 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jskw9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-585dbc889-66cdn_openstack-operators(200b1a45-bbca-460b-a578-2c913f0075f9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.291469 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt"] Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.291605 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" podUID="2cddc38a-7d58-47ec-a296-b0447a8b67c4" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.291651 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" podUID="cfd44a3c-5745-48af-b70c-86402a61492e" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.292532 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" podUID="200b1a45-bbca-460b-a578-2c913f0075f9" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.294313 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mp2jg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-ccljn_openstack-operators(2def646c-f3ea-46d4-9003-ea05abd176d7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.298319 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn"] Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.299683 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" podUID="2def646c-f3ea-46d4-9003-ea05abd176d7" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.336853 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv"] Feb 02 09:09:04 crc kubenswrapper[4747]: W0202 09:09:04.361145 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc425595e_72b6_48f6_91fc_0469ac7a634e.slice/crio-6dab803d29ddd6b4af8f80a4c0b647cacc6af09e2f88c198230ebc7de6b7608c WatchSource:0}: Error finding container 6dab803d29ddd6b4af8f80a4c0b647cacc6af09e2f88c198230ebc7de6b7608c: Status 404 returned error can't find the container with id 6dab803d29ddd6b4af8f80a4c0b647cacc6af09e2f88c198230ebc7de6b7608c Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.364235 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m 
DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vxq2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-jrklv_openstack-operators(c425595e-72b6-48f6-91fc-0469ac7a634e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.365497 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" podUID="c425595e-72b6-48f6-91fc-0469ac7a634e" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.365876 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-qpfgd"] Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.407663 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.407820 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.407870 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert podName:b9cdd96d-49df-489b-9e07-0529338f4b78 nodeName:}" failed. No retries permitted until 2026-02-02 09:09:06.407856961 +0000 UTC m=+758.952195384 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" (UID: "b9cdd96d-49df-489b-9e07-0529338f4b78") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.712810 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.712956 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.713112 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.713168 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:06.713151409 +0000 UTC m=+759.257489842 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.713254 4747 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.713332 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:06.713303132 +0000 UTC m=+759.257641565 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "metrics-server-cert" not found Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.732236 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" event={"ID":"cfd44a3c-5745-48af-b70c-86402a61492e","Type":"ContainerStarted","Data":"7f7aafab0cae4962c3f1bee696ff13f820ad6b7cc56fb6f4f6097b1097fff96b"} Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.733785 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" podUID="cfd44a3c-5745-48af-b70c-86402a61492e" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.735495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" event={"ID":"0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae","Type":"ContainerStarted","Data":"af221bd3d6a0b999dad6df48f41c028b80c47e22413bde586c49466cb8d6089d"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.737115 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" event={"ID":"409bac45-bf99-4268-aa3c-19e9d7392932","Type":"ContainerStarted","Data":"86e8df06e18b14857914915b80d4c8e25a139e3ff4840f8fefcc61c29c747ae8"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.738365 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" event={"ID":"e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3","Type":"ContainerStarted","Data":"29beaa76a6618e9c270c7ed623563f04ac1855b087f0ece5a30015c91db60fea"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.740627 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" event={"ID":"cbb834cd-ddc7-462f-8b67-d8466ce5f53e","Type":"ContainerStarted","Data":"1c286c3d60f880b5324a9920d35b8c6817d29db884398c7f9c44b4c8ccdd1a2f"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.741867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" event={"ID":"2cddc38a-7d58-47ec-a296-b0447a8b67c4","Type":"ContainerStarted","Data":"fc6015b001556a3b412e273e1cf9eb8fcc25d30c19f22e5d0367c0661addb295"} Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.743489 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" podUID="2cddc38a-7d58-47ec-a296-b0447a8b67c4" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.758541 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" 
event={"ID":"2343f1c6-2d43-468e-a2ea-7e6b2b915cec","Type":"ContainerStarted","Data":"ce37a08e0ab4eb32707624bf551acf3d07c34c56f2d277d03c0cfd59956610d8"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.761459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" event={"ID":"d2a8dd02-a258-40be-ae2b-8c4d8f093870","Type":"ContainerStarted","Data":"467b675a1d7cc09f2ee6569a2f49f881e4e0f631e395a860c80e20e5a441a3a2"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.762695 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" event={"ID":"ebea840f-6b28-4ed1-8483-084f00350673","Type":"ContainerStarted","Data":"e1486fba26d6b2bb3b3895965ea2ad4a0da03a1a1c63339a841880cc0f43c337"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.764244 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" event={"ID":"b90291c1-9921-431d-b77d-4196cb5219df","Type":"ContainerStarted","Data":"ad860b0b885ef04cb149b4db55a147d6f864231e70cc63577ee691c4bc5f5e72"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.765299 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" event={"ID":"200b1a45-bbca-460b-a578-2c913f0075f9","Type":"ContainerStarted","Data":"ed34ab0f1c728056abf30a415d04885fa2a155a038827185dc51d1ab8fde0465"} Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.766552 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" podUID="200b1a45-bbca-460b-a578-2c913f0075f9" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.775249 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" event={"ID":"c01d1e85-b676-404c-8565-900de1d7b9ff","Type":"ContainerStarted","Data":"93e5b2ba65329f35c80893b70bbee78d6dbe1835fc5bf461a8d9d6c3ed535ca7"} Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.779213 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" event={"ID":"2def646c-f3ea-46d4-9003-ea05abd176d7","Type":"ContainerStarted","Data":"699a44aa39313bda253aacd5c155f240d4b13fa387968099e0d24fc1292185ba"} Feb 02 09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.781010 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" podUID="2def646c-f3ea-46d4-9003-ea05abd176d7" Feb 02 09:09:04 crc kubenswrapper[4747]: I0202 09:09:04.782245 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" event={"ID":"c425595e-72b6-48f6-91fc-0469ac7a634e","Type":"ContainerStarted","Data":"6dab803d29ddd6b4af8f80a4c0b647cacc6af09e2f88c198230ebc7de6b7608c"} Feb 02 
09:09:04 crc kubenswrapper[4747]: E0202 09:09:04.783453 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" podUID="c425595e-72b6-48f6-91fc-0469ac7a634e" Feb 02 09:09:05 crc kubenswrapper[4747]: E0202 09:09:05.795199 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:bbb46b8b3b69fdfad7bafc10a7e88f6ea58bcdc3c91e30beb79e24417d52e0f6\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" podUID="200b1a45-bbca-460b-a578-2c913f0075f9" Feb 02 09:09:05 crc kubenswrapper[4747]: E0202 09:09:05.795510 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" podUID="c425595e-72b6-48f6-91fc-0469ac7a634e" Feb 02 09:09:05 crc kubenswrapper[4747]: E0202 09:09:05.801211 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" podUID="2def646c-f3ea-46d4-9003-ea05abd176d7" Feb 02 09:09:05 crc kubenswrapper[4747]: E0202 09:09:05.801268 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" podUID="2cddc38a-7d58-47ec-a296-b0447a8b67c4" Feb 02 09:09:05 crc kubenswrapper[4747]: E0202 09:09:05.801309 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" podUID="cfd44a3c-5745-48af-b70c-86402a61492e" Feb 02 09:09:06 crc kubenswrapper[4747]: I0202 09:09:06.148371 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.148582 4747 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.148675 4747 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert podName:9069468d-21ec-4ca1-8c03-e35555180a9a nodeName:}" failed. No retries permitted until 2026-02-02 09:09:10.148652143 +0000 UTC m=+762.692990656 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert") pod "infra-operator-controller-manager-79955696d6-qlc6w" (UID: "9069468d-21ec-4ca1-8c03-e35555180a9a") : secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: I0202 09:09:06.453176 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.453961 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.454047 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert podName:b9cdd96d-49df-489b-9e07-0529338f4b78 nodeName:}" failed. No retries permitted until 2026-02-02 09:09:10.454022842 +0000 UTC m=+762.998361345 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" (UID: "b9cdd96d-49df-489b-9e07-0529338f4b78") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: I0202 09:09:06.759227 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:06 crc kubenswrapper[4747]: I0202 09:09:06.759299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.759435 4747 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.759513 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:10.759494215 +0000 UTC m=+763.303832648 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "metrics-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.759446 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:06 crc kubenswrapper[4747]: E0202 09:09:06.759604 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:10.759589507 +0000 UTC m=+763.303927940 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: I0202 09:09:10.212334 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.212520 4747 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.213201 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert podName:9069468d-21ec-4ca1-8c03-e35555180a9a nodeName:}" failed. No retries permitted until 2026-02-02 09:09:18.213180675 +0000 UTC m=+770.757519108 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert") pod "infra-operator-controller-manager-79955696d6-qlc6w" (UID: "9069468d-21ec-4ca1-8c03-e35555180a9a") : secret "infra-operator-webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: I0202 09:09:10.517781 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.517887 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.517955 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert podName:b9cdd96d-49df-489b-9e07-0529338f4b78 nodeName:}" failed. No retries permitted until 2026-02-02 09:09:18.51792857 +0000 UTC m=+771.062266993 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" (UID: "b9cdd96d-49df-489b-9e07-0529338f4b78") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: I0202 09:09:10.822238 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:10 crc kubenswrapper[4747]: I0202 09:09:10.822330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.822509 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.822572 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:18.822551591 +0000 UTC m=+771.366890034 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.822674 4747 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 09:09:10 crc kubenswrapper[4747]: E0202 09:09:10.822780 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:18.822757086 +0000 UTC m=+771.367095569 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "metrics-server-cert" not found Feb 02 09:09:11 crc kubenswrapper[4747]: I0202 09:09:11.745043 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 09:09:17 crc kubenswrapper[4747]: I0202 09:09:17.879883 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" event={"ID":"4923ca42-a876-42cd-b992-21573dde4361","Type":"ContainerStarted","Data":"a954911f6dbfd86a0863387b924dce360b353d49be3544aaf35c0cf894bb35ba"} Feb 02 09:09:17 crc kubenswrapper[4747]: I0202 09:09:17.880422 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:17 crc kubenswrapper[4747]: I0202 09:09:17.904121 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" podStartSLOduration=11.917990817 podStartE2EDuration="15.904105682s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.409789968 +0000 UTC m=+755.954128401" lastFinishedPulling="2026-02-02 09:09:07.395904833 +0000 UTC m=+759.940243266" observedRunningTime="2026-02-02 09:09:17.898441623 +0000 UTC m=+770.442780066" watchObservedRunningTime="2026-02-02 09:09:17.904105682 +0000 UTC m=+770.448444115" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.244896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.252414 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9069468d-21ec-4ca1-8c03-e35555180a9a-cert\") pod \"infra-operator-controller-manager-79955696d6-qlc6w\" (UID: \"9069468d-21ec-4ca1-8c03-e35555180a9a\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.548184 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.549368 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.556347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b9cdd96d-49df-489b-9e07-0529338f4b78-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb\" (UID: \"b9cdd96d-49df-489b-9e07-0529338f4b78\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.609147 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.852410 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.852468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:18 crc kubenswrapper[4747]: E0202 09:09:18.852606 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 09:09:18 crc kubenswrapper[4747]: E0202 09:09:18.852656 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs podName:1c82e754-744c-49e4-9ec9-3d8dada42adf nodeName:}" failed. No retries permitted until 2026-02-02 09:09:34.852643323 +0000 UTC m=+787.396981756 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs") pod "openstack-operator-controller-manager-75d6c7dbc6-wphwt" (UID: "1c82e754-744c-49e4-9ec9-3d8dada42adf") : secret "webhook-server-cert" not found Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.861098 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-metrics-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.892386 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" event={"ID":"ebea840f-6b28-4ed1-8483-084f00350673","Type":"ContainerStarted","Data":"895f6758c93c456f336da79664e83637e757f53098a1073257ad5bfbb3e22755"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.893220 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.895007 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" event={"ID":"cbb834cd-ddc7-462f-8b67-d8466ce5f53e","Type":"ContainerStarted","Data":"76e5ecdb287edc60bbb4295d88ccfb05b016f27a1734d592dd5562e24a4e7a90"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.895329 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.896656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" event={"ID":"e7700f72-6bbd-4ba3-9835-3665f7b1df89","Type":"ContainerStarted","Data":"e5032116ec805f034d4cebe424a4fcfab9d1a9ee09f1efe77a8764683221f542"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.896974 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.903968 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" event={"ID":"2343f1c6-2d43-468e-a2ea-7e6b2b915cec","Type":"ContainerStarted","Data":"58a9d290990895cfe3098a0ca803bd034238da5ebb581855fdf8fed02cd2313d"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.904060 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.906067 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" event={"ID":"1c1d1d29-d3e3-4569-a8ef-f68a5dee0242","Type":"ContainerStarted","Data":"0e8cc1de4f638ab50ed413f1f103f62664948f3578344a4006dcb15dbc05188c"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.906386 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:18 crc 
kubenswrapper[4747]: I0202 09:09:18.912089 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" event={"ID":"da38d39a-f91a-42d4-a773-5fd894e74305","Type":"ContainerStarted","Data":"b864467dd79348f578d06dfde22b5da65fd79daf993aaba1e1f8b54dd56a2a4e"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.912499 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.922276 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" podStartSLOduration=3.451727766 podStartE2EDuration="16.922259846s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.206136608 +0000 UTC m=+756.750475041" lastFinishedPulling="2026-02-02 09:09:17.676668688 +0000 UTC m=+770.221007121" observedRunningTime="2026-02-02 09:09:18.917854468 +0000 UTC m=+771.462192901" watchObservedRunningTime="2026-02-02 09:09:18.922259846 +0000 UTC m=+771.466598279" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.931534 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" event={"ID":"0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae","Type":"ContainerStarted","Data":"02cd0adcc524f702f5ab6076465cf72b3b076cc4ca8aac160cc40823e88f195c"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.932145 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.968340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" event={"ID":"c01d1e85-b676-404c-8565-900de1d7b9ff","Type":"ContainerStarted","Data":"229ae124bfa743614d5aec32bc1fea07a2681e9c3cb87fb622f780c024f0bfcb"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.968929 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.973418 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" podStartSLOduration=3.595961463 podStartE2EDuration="16.973407947s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.2642922 +0000 UTC m=+756.808630633" lastFinishedPulling="2026-02-02 09:09:17.641738684 +0000 UTC m=+770.186077117" observedRunningTime="2026-02-02 09:09:18.938711079 +0000 UTC m=+771.483049512" watchObservedRunningTime="2026-02-02 09:09:18.973407947 +0000 UTC m=+771.517746380" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.989095 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" podStartSLOduration=2.9563053679999998 podStartE2EDuration="16.989078991s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.690083005 +0000 UTC m=+756.234421438" lastFinishedPulling="2026-02-02 09:09:17.722856628 +0000 UTC m=+770.267195061" observedRunningTime="2026-02-02 09:09:18.972610358 +0000 UTC m=+771.516948791" 
watchObservedRunningTime="2026-02-02 09:09:18.989078991 +0000 UTC m=+771.533417424" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.990033 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" event={"ID":"38cdc2d7-bf0e-499c-9953-5f3088714675","Type":"ContainerStarted","Data":"854d4fb10f6fc56ef49f42ad1833e705b8ccf16514c7e8428f1cfe8a580aca4c"} Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.990513 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" podStartSLOduration=3.26342157 podStartE2EDuration="16.990508446s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.917124758 +0000 UTC m=+756.461463181" lastFinishedPulling="2026-02-02 09:09:17.644211624 +0000 UTC m=+770.188550057" observedRunningTime="2026-02-02 09:09:18.987925422 +0000 UTC m=+771.532263855" watchObservedRunningTime="2026-02-02 09:09:18.990508446 +0000 UTC m=+771.534846879" Feb 02 09:09:18 crc kubenswrapper[4747]: I0202 09:09:18.990634 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.009216 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" event={"ID":"b90291c1-9921-431d-b77d-4196cb5219df","Type":"ContainerStarted","Data":"3f1218d369efa8caab826c9a3c0952df63a6ad30afc1b80fcb539c5cd4ea920b"} Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.009839 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.025812 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" event={"ID":"409bac45-bf99-4268-aa3c-19e9d7392932","Type":"ContainerStarted","Data":"f1b3cc1eb7a64942e294619e38170196df7fe8041ca18decf2e7d7f2bc01365e"} Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.026411 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.027140 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" podStartSLOduration=3.049339592 podStartE2EDuration="17.027122531s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.663101014 +0000 UTC m=+756.207439447" lastFinishedPulling="2026-02-02 09:09:17.640883953 +0000 UTC m=+770.185222386" observedRunningTime="2026-02-02 09:09:19.026609439 +0000 UTC m=+771.570947872" watchObservedRunningTime="2026-02-02 09:09:19.027122531 +0000 UTC m=+771.571460964" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.062374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" event={"ID":"db5ae332-d8f3-41a2-9c21-45ff5536cbb8","Type":"ContainerStarted","Data":"e778049435293c37172d3200a51448d7680557a51f52bb787a7abf21c32c7b49"} Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.062456 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.077762 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" podStartSLOduration=2.871881432 podStartE2EDuration="17.077746229s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.435975379 +0000 UTC m=+755.980313802" lastFinishedPulling="2026-02-02 09:09:17.641840166 +0000 UTC m=+770.186178599" observedRunningTime="2026-02-02 09:09:19.067209642 +0000 UTC m=+771.611548075" watchObservedRunningTime="2026-02-02 09:09:19.077746229 +0000 UTC m=+771.622084652" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.125482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" event={"ID":"d2a8dd02-a258-40be-ae2b-8c4d8f093870","Type":"ContainerStarted","Data":"d4c57ddddc67f9f36a3c580087e4f5c214609fee4aa6b00b7971ead0c07af314"} Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.129590 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.143761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" event={"ID":"e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3","Type":"ContainerStarted","Data":"aa2a1b1cc79b40838cc73acddc20b353c30e84a92dc7eb3291d30f97188dcfbe"} Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.144003 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.160829 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" podStartSLOduration=2.8379232610000003 podStartE2EDuration="17.160811101s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.31951396 +0000 UTC m=+755.863852393" lastFinishedPulling="2026-02-02 09:09:17.6424018 +0000 UTC m=+770.186740233" observedRunningTime="2026-02-02 09:09:19.094822247 +0000 UTC m=+771.639160690" watchObservedRunningTime="2026-02-02 09:09:19.160811101 +0000 UTC m=+771.705149534" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.190590 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" podStartSLOduration=3.795370861 podStartE2EDuration="17.190570189s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.281270515 +0000 UTC m=+756.825608968" lastFinishedPulling="2026-02-02 09:09:17.676469873 +0000 UTC m=+770.220808296" observedRunningTime="2026-02-02 09:09:19.146221874 +0000 UTC m=+771.690560317" watchObservedRunningTime="2026-02-02 09:09:19.190570189 +0000 UTC m=+771.734908622" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.258372 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" podStartSLOduration=3.467379348 podStartE2EDuration="17.258351397s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 
09:09:03.852726353 +0000 UTC m=+756.397064786" lastFinishedPulling="2026-02-02 09:09:17.643698402 +0000 UTC m=+770.188036835" observedRunningTime="2026-02-02 09:09:19.203547457 +0000 UTC m=+771.747885900" watchObservedRunningTime="2026-02-02 09:09:19.258351397 +0000 UTC m=+771.802689830" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.296074 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" podStartSLOduration=3.97684015 podStartE2EDuration="17.29605126s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.363295071 +0000 UTC m=+756.907633504" lastFinishedPulling="2026-02-02 09:09:17.682506181 +0000 UTC m=+770.226844614" observedRunningTime="2026-02-02 09:09:19.250551427 +0000 UTC m=+771.794889860" watchObservedRunningTime="2026-02-02 09:09:19.29605126 +0000 UTC m=+771.840389693" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.309163 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" podStartSLOduration=3.305212442 podStartE2EDuration="17.30914424s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.639655901 +0000 UTC m=+756.183994324" lastFinishedPulling="2026-02-02 09:09:17.643587689 +0000 UTC m=+770.187926122" observedRunningTime="2026-02-02 09:09:19.284687312 +0000 UTC m=+771.829025745" watchObservedRunningTime="2026-02-02 09:09:19.30914424 +0000 UTC m=+771.853482673" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.327613 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" podStartSLOduration=3.898484383 podStartE2EDuration="17.327598531s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.213557349 +0000 UTC m=+756.757895782" lastFinishedPulling="2026-02-02 09:09:17.642671467 +0000 UTC m=+770.187009930" observedRunningTime="2026-02-02 09:09:19.317032543 +0000 UTC m=+771.861370976" watchObservedRunningTime="2026-02-02 09:09:19.327598531 +0000 UTC m=+771.871936964" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.346911 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" podStartSLOduration=3.622929083 podStartE2EDuration="17.346898163s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:03.91679913 +0000 UTC m=+756.461137563" lastFinishedPulling="2026-02-02 09:09:17.64076821 +0000 UTC m=+770.185106643" observedRunningTime="2026-02-02 09:09:19.345395726 +0000 UTC m=+771.889734159" watchObservedRunningTime="2026-02-02 09:09:19.346898163 +0000 UTC m=+771.891236596" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.385020 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" podStartSLOduration=3.788005201 podStartE2EDuration="17.384997375s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.044442652 +0000 UTC m=+756.588781075" lastFinishedPulling="2026-02-02 09:09:17.641434776 +0000 UTC m=+770.185773249" observedRunningTime="2026-02-02 09:09:19.375552274 +0000 UTC m=+771.919890707" watchObservedRunningTime="2026-02-02 09:09:19.384997375 +0000 UTC 
m=+771.929335808" Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.438852 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w"] Feb 02 09:09:19 crc kubenswrapper[4747]: I0202 09:09:19.573305 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb"] Feb 02 09:09:20 crc kubenswrapper[4747]: I0202 09:09:20.157404 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" event={"ID":"9069468d-21ec-4ca1-8c03-e35555180a9a","Type":"ContainerStarted","Data":"99b48709f05c96843d2c4eb5e413ddde036a1fe3234c724f1b239206fd1e3077"} Feb 02 09:09:20 crc kubenswrapper[4747]: I0202 09:09:20.160785 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" event={"ID":"b9cdd96d-49df-489b-9e07-0529338f4b78","Type":"ContainerStarted","Data":"5b7df1db70e8febea280e5055ca0238a498f046d0ec9d2184e525fb25348507b"} Feb 02 09:09:20 crc kubenswrapper[4747]: I0202 09:09:20.518671 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:09:20 crc kubenswrapper[4747]: I0202 09:09:20.518725 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:09:22 crc kubenswrapper[4747]: I0202 09:09:22.546130 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-7kw8z" Feb 02 09:09:22 crc kubenswrapper[4747]: I0202 09:09:22.615963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-hqm5n" Feb 02 09:09:22 crc kubenswrapper[4747]: I0202 09:09:22.693419 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-rtv8v" Feb 02 09:09:23 crc kubenswrapper[4747]: I0202 09:09:23.030081 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5gqtj" Feb 02 09:09:23 crc kubenswrapper[4747]: I0202 09:09:23.043764 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-fx5wk" Feb 02 09:09:23 crc kubenswrapper[4747]: I0202 09:09:23.062849 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-np7nt" Feb 02 09:09:23 crc kubenswrapper[4747]: I0202 09:09:23.103434 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-64b5b76f97-sp2rh" Feb 02 09:09:23 crc kubenswrapper[4747]: I0202 09:09:23.235273 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/watcher-operator-controller-manager-564965969-qpfgd" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.238971 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" event={"ID":"2def646c-f3ea-46d4-9003-ea05abd176d7","Type":"ContainerStarted","Data":"413e1f4746c4f9f70a307a425a489677ea54c9fa2d6f39de5b5c5a9f918f617b"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.239827 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.241065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" event={"ID":"c425595e-72b6-48f6-91fc-0469ac7a634e","Type":"ContainerStarted","Data":"86750c3c19702e381acec009abe2db6b0c47f7466c1fa8d05081571f80476468"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.243276 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" event={"ID":"cfd44a3c-5745-48af-b70c-86402a61492e","Type":"ContainerStarted","Data":"0a157358624bb10524ab96f6389011ff403249be156491339f5e72250aa68bbf"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.243623 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.245285 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" event={"ID":"9069468d-21ec-4ca1-8c03-e35555180a9a","Type":"ContainerStarted","Data":"8cbcf3771df5fa1c0fcdd03e805cbb4276be8355844801d8ab0303a38dffe641"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.247140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" event={"ID":"200b1a45-bbca-460b-a578-2c913f0075f9","Type":"ContainerStarted","Data":"2d57b76b63303f5219a12a04cb49d108ee6d5f583439efd1add4d70307e50b92"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.247497 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.248822 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" event={"ID":"b9cdd96d-49df-489b-9e07-0529338f4b78","Type":"ContainerStarted","Data":"47e6d75daab974fcc776f53555a17fa75b460d38cc983c6823f2f9cfe543f668"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.249435 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.254911 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" event={"ID":"2cddc38a-7d58-47ec-a296-b0447a8b67c4","Type":"ContainerStarted","Data":"d58a58096b9b579eb071d25d388fabcd6c034d76f26d0715d2b8c128fe6687f0"} Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.255544 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.260735 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" podStartSLOduration=2.789802919 podStartE2EDuration="25.260719395s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.29415233 +0000 UTC m=+756.838490773" lastFinishedPulling="2026-02-02 09:09:26.765068816 +0000 UTC m=+779.309407249" observedRunningTime="2026-02-02 09:09:27.25650844 +0000 UTC m=+779.800846873" watchObservedRunningTime="2026-02-02 09:09:27.260719395 +0000 UTC m=+779.805057828" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.280591 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" podStartSLOduration=18.025490196 podStartE2EDuration="25.28056987s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:19.510080065 +0000 UTC m=+772.054418498" lastFinishedPulling="2026-02-02 09:09:26.765159739 +0000 UTC m=+779.309498172" observedRunningTime="2026-02-02 09:09:27.271275018 +0000 UTC m=+779.815613471" watchObservedRunningTime="2026-02-02 09:09:27.28056987 +0000 UTC m=+779.824908323" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.289111 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-jrklv" podStartSLOduration=1.887717927 podStartE2EDuration="24.289097353s" podCreationTimestamp="2026-02-02 09:09:03 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.364041049 +0000 UTC m=+756.908379482" lastFinishedPulling="2026-02-02 09:09:26.765420465 +0000 UTC m=+779.309758908" observedRunningTime="2026-02-02 09:09:27.285270507 +0000 UTC m=+779.829608940" watchObservedRunningTime="2026-02-02 09:09:27.289097353 +0000 UTC m=+779.833435786" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.321332 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" podStartSLOduration=18.100621354 podStartE2EDuration="25.321312656s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:19.586845363 +0000 UTC m=+772.131183786" lastFinishedPulling="2026-02-02 09:09:26.807536655 +0000 UTC m=+779.351875088" observedRunningTime="2026-02-02 09:09:27.319845299 +0000 UTC m=+779.864183742" watchObservedRunningTime="2026-02-02 09:09:27.321312656 +0000 UTC m=+779.865651099" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.351460 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" podStartSLOduration=2.86843814 podStartE2EDuration="25.351436427s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.290621863 +0000 UTC m=+756.834960296" lastFinishedPulling="2026-02-02 09:09:26.77362015 +0000 UTC m=+779.317958583" observedRunningTime="2026-02-02 09:09:27.347856468 +0000 UTC m=+779.892194911" watchObservedRunningTime="2026-02-02 09:09:27.351436427 +0000 UTC m=+779.895774860" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.364777 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" podStartSLOduration=2.811258435 podStartE2EDuration="25.364754299s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.290540441 +0000 UTC m=+756.834878864" lastFinishedPulling="2026-02-02 09:09:26.844036295 +0000 UTC m=+779.388374728" observedRunningTime="2026-02-02 09:09:27.363393535 +0000 UTC m=+779.907731968" watchObservedRunningTime="2026-02-02 09:09:27.364754299 +0000 UTC m=+779.909092732" Feb 02 09:09:27 crc kubenswrapper[4747]: I0202 09:09:27.383360 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" podStartSLOduration=2.908494329 podStartE2EDuration="25.383342182s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="2026-02-02 09:09:04.290352107 +0000 UTC m=+756.834690540" lastFinishedPulling="2026-02-02 09:09:26.76519996 +0000 UTC m=+779.309538393" observedRunningTime="2026-02-02 09:09:27.375600409 +0000 UTC m=+779.919938852" watchObservedRunningTime="2026-02-02 09:09:27.383342182 +0000 UTC m=+779.927680635" Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.262567 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.830017 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.831500 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.843708 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.937744 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.937791 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxbmk\" (UniqueName: \"kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:28 crc kubenswrapper[4747]: I0202 09:09:28.937836 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.038993 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " 
pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.039033 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxbmk\" (UniqueName: \"kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.039073 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.039470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.039596 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.066100 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxbmk\" (UniqueName: \"kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk\") pod \"certified-operators-hcbmr\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.146087 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:29 crc kubenswrapper[4747]: I0202 09:09:29.413635 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:30 crc kubenswrapper[4747]: I0202 09:09:30.280387 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerStarted","Data":"4b564adf83b83e4e8577f92ad80f59ba15adab68745367554c0c0d2dea784d22"} Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.534927 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-zqj92" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.569027 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-gg5lg" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.625194 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-nmsdj" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.766002 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-2rmn5" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.854722 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-lqfh2" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.862890 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-g5qsx" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.924421 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-6fx7k" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.948876 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-66cdn" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.955971 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-hhlsd" Feb 02 09:09:32 crc kubenswrapper[4747]: I0202 09:09:32.984900 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-ccljn" Feb 02 09:09:33 crc kubenswrapper[4747]: I0202 09:09:33.166293 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-qf4mt" Feb 02 09:09:34 crc kubenswrapper[4747]: I0202 09:09:34.320413 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerID="6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02" exitCode=0 Feb 02 09:09:34 crc kubenswrapper[4747]: I0202 09:09:34.320497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerDied","Data":"6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02"} Feb 02 09:09:34 crc 
kubenswrapper[4747]: I0202 09:09:34.925000 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:34 crc kubenswrapper[4747]: I0202 09:09:34.934655 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1c82e754-744c-49e4-9ec9-3d8dada42adf-webhook-certs\") pod \"openstack-operator-controller-manager-75d6c7dbc6-wphwt\" (UID: \"1c82e754-744c-49e4-9ec9-3d8dada42adf\") " pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:35 crc kubenswrapper[4747]: I0202 09:09:35.086405 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-26jzj" Feb 02 09:09:35 crc kubenswrapper[4747]: I0202 09:09:35.094969 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:35 crc kubenswrapper[4747]: I0202 09:09:35.538274 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt"] Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.335242 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" event={"ID":"1c82e754-744c-49e4-9ec9-3d8dada42adf","Type":"ContainerStarted","Data":"628a0893c979b3d1d336ba9f0693561ddb2e6160a7bc0763a824025dac61aa1f"} Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.335749 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.335781 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" event={"ID":"1c82e754-744c-49e4-9ec9-3d8dada42adf","Type":"ContainerStarted","Data":"15e189a2ebb188aba6e5e42c01ebd7b4dcb6e99f73bb6977383ec6c070dc2ca0"} Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.337444 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerID="4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f" exitCode=0 Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.337483 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerDied","Data":"4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f"} Feb 02 09:09:36 crc kubenswrapper[4747]: I0202 09:09:36.365711 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" podStartSLOduration=34.365694239 podStartE2EDuration="34.365694239s" podCreationTimestamp="2026-02-02 09:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:09:36.361633948 +0000 UTC m=+788.905972401" watchObservedRunningTime="2026-02-02 
09:09:36.365694239 +0000 UTC m=+788.910032672" Feb 02 09:09:37 crc kubenswrapper[4747]: I0202 09:09:37.348228 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerStarted","Data":"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06"} Feb 02 09:09:37 crc kubenswrapper[4747]: I0202 09:09:37.366829 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hcbmr" podStartSLOduration=6.797062426 podStartE2EDuration="9.36681358s" podCreationTimestamp="2026-02-02 09:09:28 +0000 UTC" firstStartedPulling="2026-02-02 09:09:34.323051287 +0000 UTC m=+786.867389730" lastFinishedPulling="2026-02-02 09:09:36.892802411 +0000 UTC m=+789.437140884" observedRunningTime="2026-02-02 09:09:37.363721303 +0000 UTC m=+789.908059786" watchObservedRunningTime="2026-02-02 09:09:37.36681358 +0000 UTC m=+789.911152013" Feb 02 09:09:38 crc kubenswrapper[4747]: I0202 09:09:38.556785 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-qlc6w" Feb 02 09:09:38 crc kubenswrapper[4747]: I0202 09:09:38.619794 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb" Feb 02 09:09:39 crc kubenswrapper[4747]: I0202 09:09:39.146234 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:39 crc kubenswrapper[4747]: I0202 09:09:39.146602 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:39 crc kubenswrapper[4747]: I0202 09:09:39.210315 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:45 crc kubenswrapper[4747]: I0202 09:09:45.106375 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-75d6c7dbc6-wphwt" Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.210956 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.286290 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.443725 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hcbmr" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="registry-server" containerID="cri-o://cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06" gracePeriod=2 Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.906904 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.951498 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content\") pod \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.951627 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxbmk\" (UniqueName: \"kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk\") pod \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.951652 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities\") pod \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\" (UID: \"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3\") " Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.952538 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities" (OuterVolumeSpecName: "utilities") pod "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" (UID: "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:09:49 crc kubenswrapper[4747]: I0202 09:09:49.959651 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk" (OuterVolumeSpecName: "kube-api-access-kxbmk") pod "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" (UID: "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3"). InnerVolumeSpecName "kube-api-access-kxbmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.005879 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" (UID: "a0f8f238-2144-4e1b-a28b-cb958a0fb4a3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.053222 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxbmk\" (UniqueName: \"kubernetes.io/projected/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-kube-api-access-kxbmk\") on node \"crc\" DevicePath \"\"" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.053255 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.053265 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.452703 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerID="cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06" exitCode=0 Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.452744 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerDied","Data":"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06"} Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.452771 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbmr" event={"ID":"a0f8f238-2144-4e1b-a28b-cb958a0fb4a3","Type":"ContainerDied","Data":"4b564adf83b83e4e8577f92ad80f59ba15adab68745367554c0c0d2dea784d22"} Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.452787 4747 scope.go:117] "RemoveContainer" containerID="cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.452786 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcbmr" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.481840 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.481985 4747 scope.go:117] "RemoveContainer" containerID="4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.490223 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hcbmr"] Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.503379 4747 scope.go:117] "RemoveContainer" containerID="6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.518661 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.518719 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.518775 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.519378 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.519448 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b" gracePeriod=600 Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.550699 4747 scope.go:117] "RemoveContainer" containerID="cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06" Feb 02 09:09:50 crc kubenswrapper[4747]: E0202 09:09:50.551412 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06\": container with ID starting with cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06 not found: ID does not exist" containerID="cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.551464 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06"} err="failed to get container status \"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06\": rpc 
error: code = NotFound desc = could not find container \"cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06\": container with ID starting with cc3c2594b3000f8163bee977eca9a78471ca6f3892c63d558f09df3b3a21dc06 not found: ID does not exist" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.551494 4747 scope.go:117] "RemoveContainer" containerID="4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f" Feb 02 09:09:50 crc kubenswrapper[4747]: E0202 09:09:50.552139 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f\": container with ID starting with 4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f not found: ID does not exist" containerID="4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.552174 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f"} err="failed to get container status \"4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f\": rpc error: code = NotFound desc = could not find container \"4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f\": container with ID starting with 4ec3d07c8508af69959123cc0b81aa75a0e17e674dff7cc306b73a05bde3c22f not found: ID does not exist" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.552196 4747 scope.go:117] "RemoveContainer" containerID="6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02" Feb 02 09:09:50 crc kubenswrapper[4747]: E0202 09:09:50.552474 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02\": container with ID starting with 6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02 not found: ID does not exist" containerID="6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02" Feb 02 09:09:50 crc kubenswrapper[4747]: I0202 09:09:50.552509 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02"} err="failed to get container status \"6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02\": rpc error: code = NotFound desc = could not find container \"6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02\": container with ID starting with 6cfbee08db2ce714c233fb15ec56f9d30f064013f8f6863c402904851f309e02 not found: ID does not exist" Feb 02 09:09:51 crc kubenswrapper[4747]: I0202 09:09:51.465313 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b" exitCode=0 Feb 02 09:09:51 crc kubenswrapper[4747]: I0202 09:09:51.465364 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b"} Feb 02 09:09:51 crc kubenswrapper[4747]: I0202 09:09:51.465576 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" 
event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683"} Feb 02 09:09:51 crc kubenswrapper[4747]: I0202 09:09:51.465596 4747 scope.go:117] "RemoveContainer" containerID="9040071d110153768f64a97f2d933e497b3389e6abc689c7c55f4c1a81e9d70b" Feb 02 09:09:52 crc kubenswrapper[4747]: I0202 09:09:52.369137 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" path="/var/lib/kubelet/pods/a0f8f238-2144-4e1b-a28b-cb958a0fb4a3/volumes" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.273629 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:02 crc kubenswrapper[4747]: E0202 09:10:02.284298 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="registry-server" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.284331 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="registry-server" Feb 02 09:10:02 crc kubenswrapper[4747]: E0202 09:10:02.284347 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="extract-content" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.284353 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="extract-content" Feb 02 09:10:02 crc kubenswrapper[4747]: E0202 09:10:02.284369 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="extract-utilities" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.284375 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="extract-utilities" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.284508 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0f8f238-2144-4e1b-a28b-cb958a0fb4a3" containerName="registry-server" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.285198 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.289119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.289246 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.289586 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-288lj" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.289770 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.295146 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.370166 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg92z\" (UniqueName: \"kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.370229 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.388481 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.390337 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.393634 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.407181 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.477097 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.477202 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg92z\" (UniqueName: \"kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.477242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.477265 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.477288 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.479531 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.505309 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg92z\" (UniqueName: \"kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z\") pod \"dnsmasq-dns-675f4bcbfc-xsj7x\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.578977 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 
09:10:02.579026 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.579091 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.579786 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.579895 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.594234 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv\") pod \"dnsmasq-dns-78dd6ddcc-x979d\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.610545 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:02 crc kubenswrapper[4747]: I0202 09:10:02.712448 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:03 crc kubenswrapper[4747]: I0202 09:10:03.056823 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:03 crc kubenswrapper[4747]: I0202 09:10:03.145544 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:03 crc kubenswrapper[4747]: W0202 09:10:03.145704 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6e186f0_9833_4961_9dc3_317e4af5937b.slice/crio-99790a34d3340115d126f94506bbda69e81c28fd102d106b9eaeb6c644542439 WatchSource:0}: Error finding container 99790a34d3340115d126f94506bbda69e81c28fd102d106b9eaeb6c644542439: Status 404 returned error can't find the container with id 99790a34d3340115d126f94506bbda69e81c28fd102d106b9eaeb6c644542439 Feb 02 09:10:03 crc kubenswrapper[4747]: I0202 09:10:03.563520 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" event={"ID":"1457832f-4905-4af5-8375-0507ac38680b","Type":"ContainerStarted","Data":"e946c0ba50fc7b2966874cbb9d5e563cdda8e9221a0b2ee5fa44f7ab98ff8b93"} Feb 02 09:10:03 crc kubenswrapper[4747]: I0202 09:10:03.565025 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" event={"ID":"d6e186f0-9833-4961-9dc3-317e4af5937b","Type":"ContainerStarted","Data":"99790a34d3340115d126f94506bbda69e81c28fd102d106b9eaeb6c644542439"} Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.163827 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.183346 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.184408 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.198193 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.322078 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.322220 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.322274 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bssnb\" (UniqueName: \"kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.424957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.425031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.425079 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bssnb\" (UniqueName: \"kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.426106 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.426619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.456876 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bssnb\" (UniqueName: 
\"kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb\") pod \"dnsmasq-dns-666b6646f7-rzpgc\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.460965 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.484797 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.488127 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.491348 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.504335 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.528295 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.528670 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.528842 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfmpt\" (UniqueName: \"kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.630570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfmpt\" (UniqueName: \"kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.630893 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.631389 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.631724 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.632295 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.657250 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfmpt\" (UniqueName: \"kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt\") pod \"dnsmasq-dns-57d769cc4f-s9dss\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:05 crc kubenswrapper[4747]: I0202 09:10:05.821350 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.027425 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:06 crc kubenswrapper[4747]: W0202 09:10:06.028647 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedfd45e2_956e_498b_829f_f474dd4a893d.slice/crio-2a6cac38ab961cee58f5ad444c03fabb7d2aa003f9f2bdc0af39ff2149343800 WatchSource:0}: Error finding container 2a6cac38ab961cee58f5ad444c03fabb7d2aa003f9f2bdc0af39ff2149343800: Status 404 returned error can't find the container with id 2a6cac38ab961cee58f5ad444c03fabb7d2aa003f9f2bdc0af39ff2149343800 Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.269847 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.322048 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.323432 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.329284 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.331312 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b792z" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.340253 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.340407 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.340521 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.340623 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.340722 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.352972 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462365 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462428 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462450 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462485 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blzlq\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.462531 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.467841 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.467924 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.467998 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.468096 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.468196 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569675 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569724 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569743 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569760 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blzlq\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " 
pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569782 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569798 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569825 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569871 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.569966 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.570369 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.570616 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.570920 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.571266 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.571292 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.571531 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.575558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.575746 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.577146 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.580058 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.585324 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blzlq\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.595474 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.604104 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" event={"ID":"edfd45e2-956e-498b-829f-f474dd4a893d","Type":"ContainerStarted","Data":"2a6cac38ab961cee58f5ad444c03fabb7d2aa003f9f2bdc0af39ff2149343800"} Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.609179 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.610793 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.615134 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.615507 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.615557 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.617168 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.617521 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.617693 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.619037 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8bnbj" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.624041 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.661392 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671421 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671465 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671561 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671587 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671607 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671790 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671907 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-598t6\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.671967 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.773085 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.773284 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.773690 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-598t6\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.773866 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774036 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774090 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774113 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774539 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774898 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.774986 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.775046 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.775068 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.775504 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.775547 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.775896 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.777064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.777166 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.779329 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.780416 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.788285 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.791110 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.793592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-598t6\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.809957 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:06 crc kubenswrapper[4747]: I0202 09:10:06.948615 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.909795 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.913650 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.915983 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-mk5rz" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.916896 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.917091 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.917204 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.921749 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 09:10:07 crc kubenswrapper[4747]: I0202 09:10:07.922716 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001159 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001198 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001247 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kolla-config\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001289 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwggl\" (UniqueName: \"kubernetes.io/projected/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kube-api-access-hwggl\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-default\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001476 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.001537 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102568 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102646 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102690 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kolla-config\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwggl\" (UniqueName: \"kubernetes.io/projected/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kube-api-access-hwggl\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102799 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-default\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.102921 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" 
(UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.103602 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.103816 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.103954 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kolla-config\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.104694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.104912 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3089b6b4-c4d3-4717-a7d7-159dd27863ac-config-data-default\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.108564 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.109253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3089b6b4-c4d3-4717-a7d7-159dd27863ac-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.121517 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwggl\" (UniqueName: \"kubernetes.io/projected/3089b6b4-c4d3-4717-a7d7-159dd27863ac-kube-api-access-hwggl\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.135710 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"3089b6b4-c4d3-4717-a7d7-159dd27863ac\") " pod="openstack/openstack-galera-0" Feb 02 09:10:08 crc kubenswrapper[4747]: I0202 09:10:08.239926 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.268829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.272603 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.276993 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.277528 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.277725 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.280001 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-hvdfj" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.292461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320114 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320151 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f7ln\" (UniqueName: \"kubernetes.io/projected/03897c48-cfa1-4875-bd93-7b645923f47e-kube-api-access-8f7ln\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320208 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320258 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.320358 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421692 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421750 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421778 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f7ln\" (UniqueName: \"kubernetes.io/projected/03897c48-cfa1-4875-bd93-7b645923f47e-kube-api-access-8f7ln\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421836 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421861 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421900 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421925 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.421970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.423614 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.423704 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.423737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.424681 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.424859 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03897c48-cfa1-4875-bd93-7b645923f47e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.429799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.431874 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03897c48-cfa1-4875-bd93-7b645923f47e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.446600 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f7ln\" (UniqueName: \"kubernetes.io/projected/03897c48-cfa1-4875-bd93-7b645923f47e-kube-api-access-8f7ln\") pod \"openstack-cell1-galera-0\" (UID: 
\"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.447803 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"03897c48-cfa1-4875-bd93-7b645923f47e\") " pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.591651 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.619727 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.620877 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.623028 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-gnjp2" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.623211 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.623249 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.632699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" event={"ID":"eeae8944-5b29-42b8-86f2-d29056d767b9","Type":"ContainerStarted","Data":"485d7ab4efad206258f9e63558d3e47956f4f2e846661dd93b714cdd8080fe7c"} Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.632797 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.726739 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-config-data\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.726803 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.726846 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kolla-config\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.726894 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhvlc\" (UniqueName: \"kubernetes.io/projected/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kube-api-access-qhvlc\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.726991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.828063 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.828186 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-config-data\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.828220 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.828256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kolla-config\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.828303 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhvlc\" (UniqueName: \"kubernetes.io/projected/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kube-api-access-qhvlc\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.829312 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kolla-config\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.829365 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-config-data\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.831369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.847949 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhvlc\" (UniqueName: \"kubernetes.io/projected/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-kube-api-access-qhvlc\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.848404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf\") " pod="openstack/memcached-0" Feb 02 09:10:09 crc kubenswrapper[4747]: I0202 09:10:09.948341 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.505884 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.507882 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.510706 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-gp7bw" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.515959 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.560010 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrmkm\" (UniqueName: \"kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm\") pod \"kube-state-metrics-0\" (UID: \"b43bea3c-d709-4f84-a052-a9b2500eaa8a\") " pod="openstack/kube-state-metrics-0" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.661730 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrmkm\" (UniqueName: \"kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm\") pod \"kube-state-metrics-0\" (UID: \"b43bea3c-d709-4f84-a052-a9b2500eaa8a\") " pod="openstack/kube-state-metrics-0" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.682666 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrmkm\" (UniqueName: \"kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm\") pod \"kube-state-metrics-0\" (UID: \"b43bea3c-d709-4f84-a052-a9b2500eaa8a\") " pod="openstack/kube-state-metrics-0" Feb 02 09:10:11 crc kubenswrapper[4747]: I0202 09:10:11.832967 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.962486 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-vhb5g"] Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.964049 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.967776 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-nhs6q" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.968120 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.968169 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.971032 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-bfdw2"] Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.972438 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.982590 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vhb5g"] Feb 02 09:10:14 crc kubenswrapper[4747]: I0202 09:10:14.991435 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bfdw2"] Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.011847 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-lib\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.011885 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6054cae9-07d3-4de6-ad28-2be1334c85c5-scripts\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.011917 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-run\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.011955 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.011982 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012005 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-etc-ovs\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012025 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-combined-ca-bundle\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012103 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mmpv\" (UniqueName: \"kubernetes.io/projected/de0ee51c-e1b1-4614-83bb-07a2d682694b-kube-api-access-4mmpv\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 
09:10:15.012228 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-ovn-controller-tls-certs\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-log-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012373 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-log\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012471 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5rpx\" (UniqueName: \"kubernetes.io/projected/6054cae9-07d3-4de6-ad28-2be1334c85c5-kube-api-access-t5rpx\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.012540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de0ee51c-e1b1-4614-83bb-07a2d682694b-scripts\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114060 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-etc-ovs\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114107 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-combined-ca-bundle\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114132 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mmpv\" (UniqueName: \"kubernetes.io/projected/de0ee51c-e1b1-4614-83bb-07a2d682694b-kube-api-access-4mmpv\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114153 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-ovn-controller-tls-certs\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114573 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-etc-ovs\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114717 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-log-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114893 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-log-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-log\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.114980 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5rpx\" (UniqueName: \"kubernetes.io/projected/6054cae9-07d3-4de6-ad28-2be1334c85c5-kube-api-access-t5rpx\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de0ee51c-e1b1-4614-83bb-07a2d682694b-scripts\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115040 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-lib\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6054cae9-07d3-4de6-ad28-2be1334c85c5-scripts\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-run\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115110 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " 
pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115144 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115185 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-lib\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115359 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-run\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115361 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115085 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/de0ee51c-e1b1-4614-83bb-07a2d682694b-var-log\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.115557 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6054cae9-07d3-4de6-ad28-2be1334c85c5-var-run-ovn\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.117209 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de0ee51c-e1b1-4614-83bb-07a2d682694b-scripts\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.117252 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6054cae9-07d3-4de6-ad28-2be1334c85c5-scripts\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.118555 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-ovn-controller-tls-certs\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.118602 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6054cae9-07d3-4de6-ad28-2be1334c85c5-combined-ca-bundle\") pod 
\"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.134031 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5rpx\" (UniqueName: \"kubernetes.io/projected/6054cae9-07d3-4de6-ad28-2be1334c85c5-kube-api-access-t5rpx\") pod \"ovn-controller-vhb5g\" (UID: \"6054cae9-07d3-4de6-ad28-2be1334c85c5\") " pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.138656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mmpv\" (UniqueName: \"kubernetes.io/projected/de0ee51c-e1b1-4614-83bb-07a2d682694b-kube-api-access-4mmpv\") pod \"ovn-controller-ovs-bfdw2\" (UID: \"de0ee51c-e1b1-4614-83bb-07a2d682694b\") " pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.287670 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:15 crc kubenswrapper[4747]: I0202 09:10:15.306477 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.791639 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.792762 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.795463 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.795776 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.796543 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.796852 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-tf62r" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.798364 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.816163 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842538 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842581 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-config\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842623 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842703 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842733 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842751 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkjps\" (UniqueName: \"kubernetes.io/projected/59c1604c-b4d8-4717-a68d-e372953a8a3f-kube-api-access-rkjps\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.842768 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944138 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944268 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkjps\" (UniqueName: \"kubernetes.io/projected/59c1604c-b4d8-4717-a68d-e372953a8a3f-kube-api-access-rkjps\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " 
pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944292 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944658 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.944811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.945262 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.945293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-config\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.946131 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-config\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.946463 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/59c1604c-b4d8-4717-a68d-e372953a8a3f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.947292 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.949335 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.949514 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" 
(UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.950039 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/59c1604c-b4d8-4717-a68d-e372953a8a3f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.962769 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkjps\" (UniqueName: \"kubernetes.io/projected/59c1604c-b4d8-4717-a68d-e372953a8a3f-kube-api-access-rkjps\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:16 crc kubenswrapper[4747]: I0202 09:10:16.980440 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"59c1604c-b4d8-4717-a68d-e372953a8a3f\") " pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:17 crc kubenswrapper[4747]: I0202 09:10:17.111453 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:17 crc kubenswrapper[4747]: I0202 09:10:17.530423 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:10:17 crc kubenswrapper[4747]: W0202 09:10:17.936585 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9627032_9b68_4e48_8372_fabd9de3d289.slice/crio-9437999481dc46a111427999ffd79d7ee534b6df3b68cb43b511375e71f9caf1 WatchSource:0}: Error finding container 9437999481dc46a111427999ffd79d7ee534b6df3b68cb43b511375e71f9caf1: Status 404 returned error can't find the container with id 9437999481dc46a111427999ffd79d7ee534b6df3b68cb43b511375e71f9caf1 Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.970177 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.970441 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sg92z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-xsj7x_openstack(1457832f-4905-4af5-8375-0507ac38680b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.972200 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" podUID="1457832f-4905-4af5-8375-0507ac38680b" Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.978301 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.978556 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n4vbv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-x979d_openstack(d6e186f0-9833-4961-9dc3-317e4af5937b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:10:17 crc kubenswrapper[4747]: E0202 09:10:17.980198 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" podUID="d6e186f0-9833-4961-9dc3-317e4af5937b" Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.457643 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 09:10:18 crc kubenswrapper[4747]: E0202 09:10:18.493163 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedfd45e2_956e_498b_829f_f474dd4a893d.slice/crio-conmon-074653316de4ec14501c741ae575b03eb3383b61b6093af052a40afd6cb0f8b4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedfd45e2_956e_498b_829f_f474dd4a893d.slice/crio-074653316de4ec14501c741ae575b03eb3383b61b6093af052a40afd6cb0f8b4.scope\": RecentStats: unable to find data in memory cache]" Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.603267 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vhb5g"] Feb 02 09:10:18 crc kubenswrapper[4747]: W0202 09:10:18.614303 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6054cae9_07d3_4de6_ad28_2be1334c85c5.slice/crio-7ca8a3ed0110a7489e20840e57d5b5dc95cb9dcdbd956e8bf67a0ee7a25fea3b WatchSource:0}: Error 
finding container 7ca8a3ed0110a7489e20840e57d5b5dc95cb9dcdbd956e8bf67a0ee7a25fea3b: Status 404 returned error can't find the container with id 7ca8a3ed0110a7489e20840e57d5b5dc95cb9dcdbd956e8bf67a0ee7a25fea3b Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.634535 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.641474 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.647428 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.714479 4747 generic.go:334] "Generic (PLEG): container finished" podID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerID="7e94f79d6863aee05ec08e9852a228b6eb6d4d09bb6959be540205201c6c2ee9" exitCode=0 Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.714541 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" event={"ID":"eeae8944-5b29-42b8-86f2-d29056d767b9","Type":"ContainerDied","Data":"7e94f79d6863aee05ec08e9852a228b6eb6d4d09bb6959be540205201c6c2ee9"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.716199 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerStarted","Data":"9437999481dc46a111427999ffd79d7ee534b6df3b68cb43b511375e71f9caf1"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.717616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerStarted","Data":"f801ccd86058b8f875a268bb3f6aa42d28d8034d61e41f167735d9f24ce5af0d"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.718898 4747 generic.go:334] "Generic (PLEG): container finished" podID="edfd45e2-956e-498b-829f-f474dd4a893d" containerID="074653316de4ec14501c741ae575b03eb3383b61b6093af052a40afd6cb0f8b4" exitCode=0 Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.718947 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" event={"ID":"edfd45e2-956e-498b-829f-f474dd4a893d","Type":"ContainerDied","Data":"074653316de4ec14501c741ae575b03eb3383b61b6093af052a40afd6cb0f8b4"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.720505 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vhb5g" event={"ID":"6054cae9-07d3-4de6-ad28-2be1334c85c5","Type":"ContainerStarted","Data":"7ca8a3ed0110a7489e20840e57d5b5dc95cb9dcdbd956e8bf67a0ee7a25fea3b"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.722444 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf","Type":"ContainerStarted","Data":"12f8eaa62d376a2df39f35eb8f4893124fe8524493d691155e8f274cc98646c2"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.725269 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3089b6b4-c4d3-4717-a7d7-159dd27863ac","Type":"ContainerStarted","Data":"2928584bb2c071a8107abd02c7be2f73aa0858cb5ab2d357ef03b0f3af97ec5f"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.731990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"03897c48-cfa1-4875-bd93-7b645923f47e","Type":"ContainerStarted","Data":"a99fee2e498493dc614be5eba49209c17362738962f7ed6607ded85fac000bb9"} Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.811713 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:10:18 crc kubenswrapper[4747]: I0202 09:10:18.903951 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.150817 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.173432 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.195393 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.197889 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.200115 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.200336 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.200506 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.200660 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-mmt7s" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.202689 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.208462 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc\") pod \"d6e186f0-9833-4961-9dc3-317e4af5937b\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.208553 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv\") pod \"d6e186f0-9833-4961-9dc3-317e4af5937b\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.208600 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config\") pod \"d6e186f0-9833-4961-9dc3-317e4af5937b\" (UID: \"d6e186f0-9833-4961-9dc3-317e4af5937b\") " Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.209238 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d6e186f0-9833-4961-9dc3-317e4af5937b" (UID: "d6e186f0-9833-4961-9dc3-317e4af5937b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.209254 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config" (OuterVolumeSpecName: "config") pod "d6e186f0-9833-4961-9dc3-317e4af5937b" (UID: "d6e186f0-9833-4961-9dc3-317e4af5937b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.217405 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv" (OuterVolumeSpecName: "kube-api-access-n4vbv") pod "d6e186f0-9833-4961-9dc3-317e4af5937b" (UID: "d6e186f0-9833-4961-9dc3-317e4af5937b"). InnerVolumeSpecName "kube-api-access-n4vbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.309730 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg92z\" (UniqueName: \"kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z\") pod \"1457832f-4905-4af5-8375-0507ac38680b\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310318 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config\") pod \"1457832f-4905-4af5-8375-0507ac38680b\" (UID: \"1457832f-4905-4af5-8375-0507ac38680b\") " Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310619 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/75232e08-a5a1-4971-893e-24c3503ff693-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310655 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310783 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310807 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hxwq\" (UniqueName: \"kubernetes.io/projected/75232e08-a5a1-4971-893e-24c3503ff693-kube-api-access-9hxwq\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310827 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310869 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310873 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config" (OuterVolumeSpecName: "config") pod "1457832f-4905-4af5-8375-0507ac38680b" (UID: "1457832f-4905-4af5-8375-0507ac38680b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.310892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-config\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.312197 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.312237 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457832f-4905-4af5-8375-0507ac38680b-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.312252 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/d6e186f0-9833-4961-9dc3-317e4af5937b-kube-api-access-n4vbv\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.312265 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e186f0-9833-4961-9dc3-317e4af5937b-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.312670 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z" (OuterVolumeSpecName: "kube-api-access-sg92z") pod "1457832f-4905-4af5-8375-0507ac38680b" (UID: "1457832f-4905-4af5-8375-0507ac38680b"). InnerVolumeSpecName "kube-api-access-sg92z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413377 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/75232e08-a5a1-4971-893e-24c3503ff693-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413439 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hxwq\" (UniqueName: \"kubernetes.io/projected/75232e08-a5a1-4971-893e-24c3503ff693-kube-api-access-9hxwq\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413543 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413606 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-config\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.413695 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg92z\" (UniqueName: \"kubernetes.io/projected/1457832f-4905-4af5-8375-0507ac38680b-kube-api-access-sg92z\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.414457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/75232e08-a5a1-4971-893e-24c3503ff693-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: 
\"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.414568 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.414970 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.415272 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75232e08-a5a1-4971-893e-24c3503ff693-config\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.418709 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.418756 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.423434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/75232e08-a5a1-4971-893e-24c3503ff693-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.435581 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hxwq\" (UniqueName: \"kubernetes.io/projected/75232e08-a5a1-4971-893e-24c3503ff693-kube-api-access-9hxwq\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.443526 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"75232e08-a5a1-4971-893e-24c3503ff693\") " pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.549541 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.744836 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b43bea3c-d709-4f84-a052-a9b2500eaa8a","Type":"ContainerStarted","Data":"0f12f5fd884f3646f42cf1242a636d661a0331bf1c9715fd7d4a4ba28036c52f"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.747256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" event={"ID":"edfd45e2-956e-498b-829f-f474dd4a893d","Type":"ContainerStarted","Data":"bbe2e533734b5e5c2ad619069e87d1aa0301a672154f4fe954ce17ed11acbf7f"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.747334 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.749221 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" event={"ID":"d6e186f0-9833-4961-9dc3-317e4af5937b","Type":"ContainerDied","Data":"99790a34d3340115d126f94506bbda69e81c28fd102d106b9eaeb6c644542439"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.749277 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x979d" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.751171 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"59c1604c-b4d8-4717-a68d-e372953a8a3f","Type":"ContainerStarted","Data":"75fccbfd9a906ecf419d5dafab8c77087e5037b0ace4404def7e3cfb2673d867"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.754380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" event={"ID":"eeae8944-5b29-42b8-86f2-d29056d767b9","Type":"ContainerStarted","Data":"771c1636792fcacede32a4e58c47fcc0fd69b4a6cf71ce3de9132c010ca4a1cb"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.754520 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.757799 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" event={"ID":"1457832f-4905-4af5-8375-0507ac38680b","Type":"ContainerDied","Data":"e946c0ba50fc7b2966874cbb9d5e563cdda8e9221a0b2ee5fa44f7ab98ff8b93"} Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.757894 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xsj7x" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.767138 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" podStartSLOduration=2.714076845 podStartE2EDuration="14.767122927s" podCreationTimestamp="2026-02-02 09:10:05 +0000 UTC" firstStartedPulling="2026-02-02 09:10:06.030744803 +0000 UTC m=+818.575083236" lastFinishedPulling="2026-02-02 09:10:18.083790885 +0000 UTC m=+830.628129318" observedRunningTime="2026-02-02 09:10:19.762074801 +0000 UTC m=+832.306413234" watchObservedRunningTime="2026-02-02 09:10:19.767122927 +0000 UTC m=+832.311461360" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.800060 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bfdw2"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.803472 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" podStartSLOduration=5.512348678 podStartE2EDuration="14.803452803s" podCreationTimestamp="2026-02-02 09:10:05 +0000 UTC" firstStartedPulling="2026-02-02 09:10:08.81194745 +0000 UTC m=+821.356285883" lastFinishedPulling="2026-02-02 09:10:18.103051575 +0000 UTC m=+830.647390008" observedRunningTime="2026-02-02 09:10:19.792891059 +0000 UTC m=+832.337229502" watchObservedRunningTime="2026-02-02 09:10:19.803452803 +0000 UTC m=+832.347791236" Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.837304 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.846028 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x979d"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.867415 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:19 crc kubenswrapper[4747]: I0202 09:10:19.874672 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xsj7x"] Feb 02 09:10:20 crc kubenswrapper[4747]: I0202 09:10:20.351958 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1457832f-4905-4af5-8375-0507ac38680b" path="/var/lib/kubelet/pods/1457832f-4905-4af5-8375-0507ac38680b/volumes" Feb 02 09:10:20 crc kubenswrapper[4747]: I0202 09:10:20.352485 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6e186f0-9833-4961-9dc3-317e4af5937b" path="/var/lib/kubelet/pods/d6e186f0-9833-4961-9dc3-317e4af5937b/volumes" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.415323 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.482744 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-cqqzs"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.483620 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.485356 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.498225 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-cqqzs"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.567748 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.568040 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovs-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.568137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zj88\" (UniqueName: \"kubernetes.io/projected/a214831a-017f-45e4-9040-e9d7c8db06f7-kube-api-access-4zj88\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.568284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovn-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.568376 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-combined-ca-bundle\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.568552 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a214831a-017f-45e4-9040-e9d7c8db06f7-config\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.626253 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.654642 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.656104 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.658365 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.669518 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670411 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a214831a-017f-45e4-9040-e9d7c8db06f7-config\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670517 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovs-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670539 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zj88\" (UniqueName: \"kubernetes.io/projected/a214831a-017f-45e4-9040-e9d7c8db06f7-kube-api-access-4zj88\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670595 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovn-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.670621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-combined-ca-bundle\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.671133 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovs-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.672515 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a214831a-017f-45e4-9040-e9d7c8db06f7-ovn-rundir\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.674861 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a214831a-017f-45e4-9040-e9d7c8db06f7-config\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.692872 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-combined-ca-bundle\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.692876 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a214831a-017f-45e4-9040-e9d7c8db06f7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.695588 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zj88\" (UniqueName: \"kubernetes.io/projected/a214831a-017f-45e4-9040-e9d7c8db06f7-kube-api-access-4zj88\") pod \"ovn-controller-metrics-cqqzs\" (UID: \"a214831a-017f-45e4-9040-e9d7c8db06f7\") " pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.756175 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.775589 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.776582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.776641 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htvn8\" (UniqueName: \"kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.776690 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.776762 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.778584 4747 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="dnsmasq-dns" containerID="cri-o://bbe2e533734b5e5c2ad619069e87d1aa0301a672154f4fe954ce17ed11acbf7f" gracePeriod=10 Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.778814 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="dnsmasq-dns" containerID="cri-o://771c1636792fcacede32a4e58c47fcc0fd69b4a6cf71ce3de9132c010ca4a1cb" gracePeriod=10 Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.779768 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.782681 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.796551 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.826249 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-cqqzs" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878008 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878063 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878097 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878122 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4frh\" (UniqueName: \"kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: 
\"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878197 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878224 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.878247 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htvn8\" (UniqueName: \"kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.879249 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.879787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.880321 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.893559 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htvn8\" (UniqueName: \"kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8\") pod \"dnsmasq-dns-7fd796d7df-fp849\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.979327 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.979407 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " 
pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.979449 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.979471 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4frh\" (UniqueName: \"kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.979487 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.980220 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.980745 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.981243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.981741 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:21 crc kubenswrapper[4747]: I0202 09:10:21.996966 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4frh\" (UniqueName: \"kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh\") pod \"dnsmasq-dns-86db49b7ff-khjv9\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.047137 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.116669 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:22 crc kubenswrapper[4747]: W0202 09:10:22.316439 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde0ee51c_e1b1_4614_83bb_07a2d682694b.slice/crio-3185165dd3974332ac275e25607cf73c1a886c2883211ce4a472d687758188fc WatchSource:0}: Error finding container 3185165dd3974332ac275e25607cf73c1a886c2883211ce4a472d687758188fc: Status 404 returned error can't find the container with id 3185165dd3974332ac275e25607cf73c1a886c2883211ce4a472d687758188fc Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.344151 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:10:22 crc kubenswrapper[4747]: W0202 09:10:22.358084 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75232e08_a5a1_4971_893e_24c3503ff693.slice/crio-340d0b8325bbd1fba4c57724d9e9223b6e9efdc34e469312d5d2f4ca4a25a01b WatchSource:0}: Error finding container 340d0b8325bbd1fba4c57724d9e9223b6e9efdc34e469312d5d2f4ca4a25a01b: Status 404 returned error can't find the container with id 340d0b8325bbd1fba4c57724d9e9223b6e9efdc34e469312d5d2f4ca4a25a01b Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.784142 4747 generic.go:334] "Generic (PLEG): container finished" podID="edfd45e2-956e-498b-829f-f474dd4a893d" containerID="bbe2e533734b5e5c2ad619069e87d1aa0301a672154f4fe954ce17ed11acbf7f" exitCode=0 Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.784342 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" event={"ID":"edfd45e2-956e-498b-829f-f474dd4a893d","Type":"ContainerDied","Data":"bbe2e533734b5e5c2ad619069e87d1aa0301a672154f4fe954ce17ed11acbf7f"} Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.786724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bfdw2" event={"ID":"de0ee51c-e1b1-4614-83bb-07a2d682694b","Type":"ContainerStarted","Data":"3185165dd3974332ac275e25607cf73c1a886c2883211ce4a472d687758188fc"} Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.790817 4747 generic.go:334] "Generic (PLEG): container finished" podID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerID="771c1636792fcacede32a4e58c47fcc0fd69b4a6cf71ce3de9132c010ca4a1cb" exitCode=0 Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.790864 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" event={"ID":"eeae8944-5b29-42b8-86f2-d29056d767b9","Type":"ContainerDied","Data":"771c1636792fcacede32a4e58c47fcc0fd69b4a6cf71ce3de9132c010ca4a1cb"} Feb 02 09:10:22 crc kubenswrapper[4747]: I0202 09:10:22.791877 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"75232e08-a5a1-4971-893e-24c3503ff693","Type":"ContainerStarted","Data":"340d0b8325bbd1fba4c57724d9e9223b6e9efdc34e469312d5d2f4ca4a25a01b"} Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.358401 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.364474 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.445854 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc\") pod \"edfd45e2-956e-498b-829f-f474dd4a893d\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.445909 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc\") pod \"eeae8944-5b29-42b8-86f2-d29056d767b9\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.446023 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfmpt\" (UniqueName: \"kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt\") pod \"eeae8944-5b29-42b8-86f2-d29056d767b9\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.446091 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bssnb\" (UniqueName: \"kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb\") pod \"edfd45e2-956e-498b-829f-f474dd4a893d\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.446138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config\") pod \"edfd45e2-956e-498b-829f-f474dd4a893d\" (UID: \"edfd45e2-956e-498b-829f-f474dd4a893d\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.446209 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config\") pod \"eeae8944-5b29-42b8-86f2-d29056d767b9\" (UID: \"eeae8944-5b29-42b8-86f2-d29056d767b9\") " Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.451450 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt" (OuterVolumeSpecName: "kube-api-access-vfmpt") pod "eeae8944-5b29-42b8-86f2-d29056d767b9" (UID: "eeae8944-5b29-42b8-86f2-d29056d767b9"). InnerVolumeSpecName "kube-api-access-vfmpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.452328 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb" (OuterVolumeSpecName: "kube-api-access-bssnb") pod "edfd45e2-956e-498b-829f-f474dd4a893d" (UID: "edfd45e2-956e-498b-829f-f474dd4a893d"). InnerVolumeSpecName "kube-api-access-bssnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.478241 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eeae8944-5b29-42b8-86f2-d29056d767b9" (UID: "eeae8944-5b29-42b8-86f2-d29056d767b9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.482840 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "edfd45e2-956e-498b-829f-f474dd4a893d" (UID: "edfd45e2-956e-498b-829f-f474dd4a893d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.485286 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config" (OuterVolumeSpecName: "config") pod "edfd45e2-956e-498b-829f-f474dd4a893d" (UID: "edfd45e2-956e-498b-829f-f474dd4a893d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.492021 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config" (OuterVolumeSpecName: "config") pod "eeae8944-5b29-42b8-86f2-d29056d767b9" (UID: "eeae8944-5b29-42b8-86f2-d29056d767b9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548082 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548117 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548125 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eeae8944-5b29-42b8-86f2-d29056d767b9-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548134 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfmpt\" (UniqueName: \"kubernetes.io/projected/eeae8944-5b29-42b8-86f2-d29056d767b9-kube-api-access-vfmpt\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548144 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bssnb\" (UniqueName: \"kubernetes.io/projected/edfd45e2-956e-498b-829f-f474dd4a893d-kube-api-access-bssnb\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.548151 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edfd45e2-956e-498b-829f-f474dd4a893d-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.823309 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" event={"ID":"edfd45e2-956e-498b-829f-f474dd4a893d","Type":"ContainerDied","Data":"2a6cac38ab961cee58f5ad444c03fabb7d2aa003f9f2bdc0af39ff2149343800"} Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.823379 4747 scope.go:117] "RemoveContainer" containerID="bbe2e533734b5e5c2ad619069e87d1aa0301a672154f4fe954ce17ed11acbf7f" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.823336 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.827172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" event={"ID":"eeae8944-5b29-42b8-86f2-d29056d767b9","Type":"ContainerDied","Data":"485d7ab4efad206258f9e63558d3e47956f4f2e846661dd93b714cdd8080fe7c"} Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.827249 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.855000 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.861920 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rzpgc"] Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.875443 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:26 crc kubenswrapper[4747]: I0202 09:10:26.880239 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-s9dss"] Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.391402 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.429948 4747 scope.go:117] "RemoveContainer" containerID="074653316de4ec14501c741ae575b03eb3383b61b6093af052a40afd6cb0f8b4" Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.603201 4747 scope.go:117] "RemoveContainer" containerID="771c1636792fcacede32a4e58c47fcc0fd69b4a6cf71ce3de9132c010ca4a1cb" Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.680406 4747 scope.go:117] "RemoveContainer" containerID="7e94f79d6863aee05ec08e9852a228b6eb6d4d09bb6959be540205201c6c2ee9" Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.710264 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.821531 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-cqqzs"] Feb 02 09:10:27 crc kubenswrapper[4747]: I0202 09:10:27.838113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" event={"ID":"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e","Type":"ContainerStarted","Data":"664bc551059432f468d1fcfd9892a19ef0521096cd79fd0201a519e59536c77d"} Feb 02 09:10:28 crc kubenswrapper[4747]: W0202 09:10:28.173423 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12c98ada_7b2c_458c_b998_998febe86c53.slice/crio-b9c4aa10cd2fab1ce40fc5759341dcc07974b5b04de10097070d4065814004ad WatchSource:0}: Error finding container b9c4aa10cd2fab1ce40fc5759341dcc07974b5b04de10097070d4065814004ad: Status 404 returned error can't find the container with id b9c4aa10cd2fab1ce40fc5759341dcc07974b5b04de10097070d4065814004ad Feb 02 09:10:28 crc kubenswrapper[4747]: W0202 09:10:28.195198 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda214831a_017f_45e4_9040_e9d7c8db06f7.slice/crio-83da48d61ab65ed550469fcb73a1fd89b76ef62d63d5e7da7a8bd10658780ec0 WatchSource:0}: Error finding container 83da48d61ab65ed550469fcb73a1fd89b76ef62d63d5e7da7a8bd10658780ec0: Status 404 returned 
error can't find the container with id 83da48d61ab65ed550469fcb73a1fd89b76ef62d63d5e7da7a8bd10658780ec0 Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.381561 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" path="/var/lib/kubelet/pods/edfd45e2-956e-498b-829f-f474dd4a893d/volumes" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.382583 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" path="/var/lib/kubelet/pods/eeae8944-5b29-42b8-86f2-d29056d767b9/volumes" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.850616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerStarted","Data":"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.850656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerStarted","Data":"b9c4aa10cd2fab1ce40fc5759341dcc07974b5b04de10097070d4065814004ad"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.855842 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf","Type":"ContainerStarted","Data":"522f2ef031fa09eb1bf04f337129fddaf54bb7563a6bcf5634fb2dc749a5042f"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.855983 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.857606 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3089b6b4-c4d3-4717-a7d7-159dd27863ac","Type":"ContainerStarted","Data":"2b8b5182a818b7472f76db10f9c2ae1097c8b3edbf6592c472be70c287155a3f"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.863070 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"59c1604c-b4d8-4717-a68d-e372953a8a3f","Type":"ContainerStarted","Data":"bc46fa06802af8e097244ca785c23c5d6bb811121fb1970bbff1cffc4802aab0"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.870568 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"75232e08-a5a1-4971-893e-24c3503ff693","Type":"ContainerStarted","Data":"687cd7f995b7da89f20fd4dd57fdf2301b8d55c8b5169605cc6977700d791b85"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.874017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b43bea3c-d709-4f84-a052-a9b2500eaa8a","Type":"ContainerStarted","Data":"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.874230 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.876896 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-cqqzs" event={"ID":"a214831a-017f-45e4-9040-e9d7c8db06f7","Type":"ContainerStarted","Data":"83da48d61ab65ed550469fcb73a1fd89b76ef62d63d5e7da7a8bd10658780ec0"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.880316 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vhb5g" 
event={"ID":"6054cae9-07d3-4de6-ad28-2be1334c85c5","Type":"ContainerStarted","Data":"9979bb75f51a918681802bb5acfaa24c59853b5e294edefadaf1bf13462ea4e2"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.880439 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-vhb5g" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.888489 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.957217232 podStartE2EDuration="19.88846862s" podCreationTimestamp="2026-02-02 09:10:09 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.655196203 +0000 UTC m=+831.199534636" lastFinishedPulling="2026-02-02 09:10:26.586447571 +0000 UTC m=+839.130786024" observedRunningTime="2026-02-02 09:10:28.885490656 +0000 UTC m=+841.429829089" watchObservedRunningTime="2026-02-02 09:10:28.88846862 +0000 UTC m=+841.432807043" Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.892993 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bfdw2" event={"ID":"de0ee51c-e1b1-4614-83bb-07a2d682694b","Type":"ContainerStarted","Data":"13192ab490588148fdfac3823147afcd445608d7f1a536d05e016aa1dd9e2b6f"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.895871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"03897c48-cfa1-4875-bd93-7b645923f47e","Type":"ContainerStarted","Data":"7a33de5ab4122d66f0207af2c9c5df92ecb1844f9131a705c3fb299d31fbb447"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.900505 4747 generic.go:334] "Generic (PLEG): container finished" podID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerID="3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676" exitCode=0 Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.900547 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" event={"ID":"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e","Type":"ContainerDied","Data":"3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676"} Feb 02 09:10:28 crc kubenswrapper[4747]: I0202 09:10:28.959213 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-vhb5g" podStartSLOduration=6.062890673 podStartE2EDuration="14.959190433s" podCreationTimestamp="2026-02-02 09:10:14 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.617302698 +0000 UTC m=+831.161641131" lastFinishedPulling="2026-02-02 09:10:27.513602458 +0000 UTC m=+840.057940891" observedRunningTime="2026-02-02 09:10:28.954454825 +0000 UTC m=+841.498793278" watchObservedRunningTime="2026-02-02 09:10:28.959190433 +0000 UTC m=+841.503528866" Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.016859 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=8.56142722 podStartE2EDuration="18.016840861s" podCreationTimestamp="2026-02-02 09:10:11 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.818468504 +0000 UTC m=+831.362806937" lastFinishedPulling="2026-02-02 09:10:28.273882145 +0000 UTC m=+840.818220578" observedRunningTime="2026-02-02 09:10:29.01279088 +0000 UTC m=+841.557129303" watchObservedRunningTime="2026-02-02 09:10:29.016840861 +0000 UTC m=+841.561179294" Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.908727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerStarted","Data":"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.910503 4747 generic.go:334] "Generic (PLEG): container finished" podID="de0ee51c-e1b1-4614-83bb-07a2d682694b" containerID="13192ab490588148fdfac3823147afcd445608d7f1a536d05e016aa1dd9e2b6f" exitCode=0 Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.910555 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bfdw2" event={"ID":"de0ee51c-e1b1-4614-83bb-07a2d682694b","Type":"ContainerDied","Data":"13192ab490588148fdfac3823147afcd445608d7f1a536d05e016aa1dd9e2b6f"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.913651 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" event={"ID":"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e","Type":"ContainerStarted","Data":"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.913799 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.915888 4747 generic.go:334] "Generic (PLEG): container finished" podID="12c98ada-7b2c-458c-b998-998febe86c53" containerID="960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e" exitCode=0 Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.915972 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerDied","Data":"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.916000 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerStarted","Data":"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.916112 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.918131 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerStarted","Data":"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40"} Feb 02 09:10:29 crc kubenswrapper[4747]: I0202 09:10:29.990309 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" podStartSLOduration=8.990288013 podStartE2EDuration="8.990288013s" podCreationTimestamp="2026-02-02 09:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:10:29.988088218 +0000 UTC m=+842.532426691" watchObservedRunningTime="2026-02-02 09:10:29.990288013 +0000 UTC m=+842.534626446" Feb 02 09:10:30 crc kubenswrapper[4747]: I0202 09:10:30.009757 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" podStartSLOduration=9.009737688 podStartE2EDuration="9.009737688s" podCreationTimestamp="2026-02-02 09:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-02 09:10:30.006327013 +0000 UTC m=+842.550665466" watchObservedRunningTime="2026-02-02 09:10:30.009737688 +0000 UTC m=+842.554076121" Feb 02 09:10:30 crc kubenswrapper[4747]: I0202 09:10:30.506417 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-rzpgc" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.93:5353: i/o timeout" Feb 02 09:10:30 crc kubenswrapper[4747]: I0202 09:10:30.823248 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-s9dss" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.94:5353: i/o timeout" Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.951060 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.961259 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"59c1604c-b4d8-4717-a68d-e372953a8a3f","Type":"ContainerStarted","Data":"d76e2b0786aa788da448ca5e536aa83c3c3f5d23262c5d08bb6bc02d2d0760f8"} Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.963719 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"75232e08-a5a1-4971-893e-24c3503ff693","Type":"ContainerStarted","Data":"1fad77b57b63e95a0e99b2ad160346859eed4efc14b1f383f64767c7509e2e21"} Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.972456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-cqqzs" event={"ID":"a214831a-017f-45e4-9040-e9d7c8db06f7","Type":"ContainerStarted","Data":"e32d832fe502a026eb768a185beb278190d193a34adcf642b22307cf8539f015"} Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.975370 4747 generic.go:334] "Generic (PLEG): container finished" podID="3089b6b4-c4d3-4717-a7d7-159dd27863ac" containerID="2b8b5182a818b7472f76db10f9c2ae1097c8b3edbf6592c472be70c287155a3f" exitCode=0 Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.975431 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3089b6b4-c4d3-4717-a7d7-159dd27863ac","Type":"ContainerDied","Data":"2b8b5182a818b7472f76db10f9c2ae1097c8b3edbf6592c472be70c287155a3f"} Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.994120 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bfdw2" event={"ID":"de0ee51c-e1b1-4614-83bb-07a2d682694b","Type":"ContainerStarted","Data":"56c4e026be85a6096ff4a4d077cc17e640cd7977ba4b286eb895537771ae196d"} Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.999320 4747 generic.go:334] "Generic (PLEG): container finished" podID="03897c48-cfa1-4875-bd93-7b645923f47e" containerID="7a33de5ab4122d66f0207af2c9c5df92ecb1844f9131a705c3fb299d31fbb447" exitCode=0 Feb 02 09:10:34 crc kubenswrapper[4747]: I0202 09:10:34.999369 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"03897c48-cfa1-4875-bd93-7b645923f47e","Type":"ContainerDied","Data":"7a33de5ab4122d66f0207af2c9c5df92ecb1844f9131a705c3fb299d31fbb447"} Feb 02 09:10:35 crc kubenswrapper[4747]: I0202 09:10:35.000169 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.646239517 podStartE2EDuration="20.000150016s" 
podCreationTimestamp="2026-02-02 09:10:15 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.908270743 +0000 UTC m=+831.452609176" lastFinishedPulling="2026-02-02 09:10:30.262181221 +0000 UTC m=+842.806519675" observedRunningTime="2026-02-02 09:10:34.986527566 +0000 UTC m=+847.530866009" watchObservedRunningTime="2026-02-02 09:10:35.000150016 +0000 UTC m=+847.544488449" Feb 02 09:10:35 crc kubenswrapper[4747]: I0202 09:10:35.037649 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.133243323 podStartE2EDuration="17.037629431s" podCreationTimestamp="2026-02-02 09:10:18 +0000 UTC" firstStartedPulling="2026-02-02 09:10:22.361162637 +0000 UTC m=+834.905501070" lastFinishedPulling="2026-02-02 09:10:30.265548745 +0000 UTC m=+842.809887178" observedRunningTime="2026-02-02 09:10:35.015241132 +0000 UTC m=+847.559579565" watchObservedRunningTime="2026-02-02 09:10:35.037629431 +0000 UTC m=+847.581967854" Feb 02 09:10:35 crc kubenswrapper[4747]: I0202 09:10:35.058282 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-cqqzs" podStartSLOduration=12.006296262 podStartE2EDuration="14.058265156s" podCreationTimestamp="2026-02-02 09:10:21 +0000 UTC" firstStartedPulling="2026-02-02 09:10:28.220423392 +0000 UTC m=+840.764761825" lastFinishedPulling="2026-02-02 09:10:30.272392276 +0000 UTC m=+842.816730719" observedRunningTime="2026-02-02 09:10:35.050999034 +0000 UTC m=+847.595337477" watchObservedRunningTime="2026-02-02 09:10:35.058265156 +0000 UTC m=+847.602603589" Feb 02 09:10:35 crc kubenswrapper[4747]: I0202 09:10:35.112519 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:35 crc kubenswrapper[4747]: I0202 09:10:35.154730 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.017923 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3089b6b4-c4d3-4717-a7d7-159dd27863ac","Type":"ContainerStarted","Data":"12d3e90267e16c2e39e08121eb43c584254ae60f8b7e1d84dc920c2503897b08"} Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.021086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bfdw2" event={"ID":"de0ee51c-e1b1-4614-83bb-07a2d682694b","Type":"ContainerStarted","Data":"666056650d206a39c40daed99037fce79a609239b8cdebdbcf6d4491df0efbc2"} Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.021386 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.023285 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"03897c48-cfa1-4875-bd93-7b645923f47e","Type":"ContainerStarted","Data":"5118c1e184dfdbd3231a6b96b7e266772dfc570cd22a0ade3b913c7bf7e3cd9b"} Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.024175 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.046501 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=21.699223775 podStartE2EDuration="30.046485186s" podCreationTimestamp="2026-02-02 09:10:06 +0000 UTC" firstStartedPulling="2026-02-02 
09:10:18.557530167 +0000 UTC m=+831.101868600" lastFinishedPulling="2026-02-02 09:10:26.904791578 +0000 UTC m=+839.449130011" observedRunningTime="2026-02-02 09:10:36.041367948 +0000 UTC m=+848.585706391" watchObservedRunningTime="2026-02-02 09:10:36.046485186 +0000 UTC m=+848.590823619" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.069274 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=19.510920311 podStartE2EDuration="28.069254574s" podCreationTimestamp="2026-02-02 09:10:08 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.655258204 +0000 UTC m=+831.199596637" lastFinishedPulling="2026-02-02 09:10:27.213592467 +0000 UTC m=+839.757930900" observedRunningTime="2026-02-02 09:10:36.061626393 +0000 UTC m=+848.605964836" watchObservedRunningTime="2026-02-02 09:10:36.069254574 +0000 UTC m=+848.613593017" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.074346 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 02 09:10:36 crc kubenswrapper[4747]: I0202 09:10:36.089724 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-bfdw2" podStartSLOduration=16.919781887 podStartE2EDuration="22.089529369s" podCreationTimestamp="2026-02-02 09:10:14 +0000 UTC" firstStartedPulling="2026-02-02 09:10:22.343856136 +0000 UTC m=+834.888194569" lastFinishedPulling="2026-02-02 09:10:27.513603618 +0000 UTC m=+840.057942051" observedRunningTime="2026-02-02 09:10:36.08113291 +0000 UTC m=+848.625471343" watchObservedRunningTime="2026-02-02 09:10:36.089529369 +0000 UTC m=+848.633867812" Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.030567 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.049142 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.118735 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.169471 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.549703 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:37 crc kubenswrapper[4747]: I0202 09:10:37.596528 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.037419 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.038259 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="dnsmasq-dns" containerID="cri-o://cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2" gracePeriod=10 Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.090499 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.241411 4747 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.241728 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.247591 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 02 09:10:38 crc kubenswrapper[4747]: E0202 09:10:38.247872 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.247887 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: E0202 09:10:38.247922 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="init" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.247928 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="init" Feb 02 09:10:38 crc kubenswrapper[4747]: E0202 09:10:38.247958 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.247963 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: E0202 09:10:38.247988 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="init" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.247994 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="init" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.248130 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeae8944-5b29-42b8-86f2-d29056d767b9" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.248141 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="edfd45e2-956e-498b-829f-f474dd4a893d" containerName="dnsmasq-dns" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.248864 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.253558 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.253905 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.254121 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.254258 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-8wn29" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.285666 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399398 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399442 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-config\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399556 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-scripts\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399572 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399609 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.399635 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwvz5\" (UniqueName: \"kubernetes.io/projected/29b3b6f8-cfec-49c7-aca0-37647aacc62b-kube-api-access-vwvz5\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: 
I0202 09:10:38.500752 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.500817 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwvz5\" (UniqueName: \"kubernetes.io/projected/29b3b6f8-cfec-49c7-aca0-37647aacc62b-kube-api-access-vwvz5\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.500886 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.500910 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-config\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.500954 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.501019 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-scripts\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.501042 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.505838 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.506278 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-config\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.506852 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29b3b6f8-cfec-49c7-aca0-37647aacc62b-scripts\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.514588 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.515068 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.517553 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/29b3b6f8-cfec-49c7-aca0-37647aacc62b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.517564 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwvz5\" (UniqueName: \"kubernetes.io/projected/29b3b6f8-cfec-49c7-aca0-37647aacc62b-kube-api-access-vwvz5\") pod \"ovn-northd-0\" (UID: \"29b3b6f8-cfec-49c7-aca0-37647aacc62b\") " pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.596113 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.613565 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.702950 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc\") pod \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.703051 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config\") pod \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.703098 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htvn8\" (UniqueName: \"kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8\") pod \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.703159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb\") pod \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\" (UID: \"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e\") " Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.714604 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8" (OuterVolumeSpecName: "kube-api-access-htvn8") pod "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" (UID: "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e"). InnerVolumeSpecName "kube-api-access-htvn8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.759194 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" (UID: "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.776620 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" (UID: "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.779696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config" (OuterVolumeSpecName: "config") pod "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" (UID: "7c32dfcc-762c-44c9-aa21-b5d95bfbd21e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.804455 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.805760 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.805785 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htvn8\" (UniqueName: \"kubernetes.io/projected/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-kube-api-access-htvn8\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:38 crc kubenswrapper[4747]: I0202 09:10:38.805795 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:39 crc kubenswrapper[4747]: E0202 09:10:39.008200 4747 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.190:41490->38.102.83.190:43437: write tcp 38.102.83.190:41490->38.102.83.190:43437: write: broken pipe Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.052146 4747 generic.go:334] "Generic (PLEG): container finished" podID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerID="cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2" exitCode=0 Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.052209 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.052564 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" event={"ID":"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e","Type":"ContainerDied","Data":"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2"} Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.052616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-fp849" event={"ID":"7c32dfcc-762c-44c9-aa21-b5d95bfbd21e","Type":"ContainerDied","Data":"664bc551059432f468d1fcfd9892a19ef0521096cd79fd0201a519e59536c77d"} Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.052635 4747 scope.go:117] "RemoveContainer" containerID="cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.057536 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.073183 4747 scope.go:117] "RemoveContainer" containerID="3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.086743 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.095657 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-fp849"] Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.098861 4747 scope.go:117] "RemoveContainer" containerID="cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2" Feb 02 09:10:39 crc kubenswrapper[4747]: E0202 09:10:39.099274 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2\": container with ID starting with cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2 not found: ID does not exist" containerID="cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.099302 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2"} err="failed to get container status \"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2\": rpc error: code = NotFound desc = could not find container \"cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2\": container with ID starting with cc358d9629400ef77c8129f48be7db41518080b8d49c7832dad2a3491934e9d2 not found: ID does not exist" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.099321 4747 scope.go:117] "RemoveContainer" containerID="3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676" Feb 02 09:10:39 crc kubenswrapper[4747]: E0202 09:10:39.099511 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676\": container with ID starting with 3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676 not found: ID does not exist" containerID="3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.099532 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676"} err="failed to get container status \"3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676\": rpc error: code = NotFound desc = could not find container \"3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676\": container with ID starting with 3b74b92ffaf5caf48d5f695fb24f2283d94002b3ba05e4af1f89ddf5d2509676 not found: ID does not exist" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.592321 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:39 crc kubenswrapper[4747]: I0202 09:10:39.592396 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:40 crc kubenswrapper[4747]: I0202 09:10:40.088158 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29b3b6f8-cfec-49c7-aca0-37647aacc62b","Type":"ContainerStarted","Data":"0cb4c00c1b5df4d59ad566747a943c6bbaf8e6bd1360fe5fa593a9de3f380225"} Feb 02 09:10:40 crc kubenswrapper[4747]: I0202 09:10:40.347195 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" path="/var/lib/kubelet/pods/7c32dfcc-762c-44c9-aa21-b5d95bfbd21e/volumes" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.012504 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.099171 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29b3b6f8-cfec-49c7-aca0-37647aacc62b","Type":"ContainerStarted","Data":"68b22bf28c17f3eb31d192646ffd50073dba66d9011f8754d09cccff30a28ae8"} Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.099217 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"29b3b6f8-cfec-49c7-aca0-37647aacc62b","Type":"ContainerStarted","Data":"4d18794fd88e0b4e47719cefa90438803dc37d6e4d72b9e3df99f918aebc2c5d"} Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.099333 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.118443 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.125916 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.152297618 podStartE2EDuration="3.125889084s" podCreationTimestamp="2026-02-02 09:10:38 +0000 UTC" firstStartedPulling="2026-02-02 09:10:39.061782318 +0000 UTC m=+851.606120751" lastFinishedPulling="2026-02-02 09:10:40.035373774 +0000 UTC m=+852.579712217" observedRunningTime="2026-02-02 09:10:41.11649094 +0000 UTC m=+853.660829413" watchObservedRunningTime="2026-02-02 09:10:41.125889084 +0000 UTC m=+853.670227547" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.849215 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.923746 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:10:41 crc kubenswrapper[4747]: E0202 09:10:41.924127 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="init" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.924147 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="init" Feb 02 09:10:41 crc kubenswrapper[4747]: E0202 09:10:41.924166 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="dnsmasq-dns" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.924174 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="dnsmasq-dns" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.924338 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c32dfcc-762c-44c9-aa21-b5d95bfbd21e" containerName="dnsmasq-dns" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.925103 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:41 crc kubenswrapper[4747]: I0202 09:10:41.951468 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.065918 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.066099 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.066204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.066256 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.066348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbndg\" (UniqueName: \"kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.169478 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " 
pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.169533 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.169592 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbndg\" (UniqueName: \"kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.169656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.169687 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.170459 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.170556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.170606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.170913 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.195987 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbndg\" (UniqueName: \"kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg\") pod \"dnsmasq-dns-698758b865-qk4l8\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.251853 
4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.280008 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.404359 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 02 09:10:42 crc kubenswrapper[4747]: I0202 09:10:42.718889 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:10:42 crc kubenswrapper[4747]: W0202 09:10:42.724223 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd031753_0dc4_4ce4_be69_2d28f88f008b.slice/crio-c1f5fc8be24b6e592ef435bccc95bf6864562e15f363723d4a85b250a17e4b64 WatchSource:0}: Error finding container c1f5fc8be24b6e592ef435bccc95bf6864562e15f363723d4a85b250a17e4b64: Status 404 returned error can't find the container with id c1f5fc8be24b6e592ef435bccc95bf6864562e15f363723d4a85b250a17e4b64 Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.070506 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.075583 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.077781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.077899 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.078045 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.078102 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-v5ngv" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.098956 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.124755 4747 generic.go:334] "Generic (PLEG): container finished" podID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerID="be82b833501e5118114b4aa3903fd2dff8149e9740a5eba40439c63e200b2cc0" exitCode=0 Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.124851 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qk4l8" event={"ID":"dd031753-0dc4-4ce4-be69-2d28f88f008b","Type":"ContainerDied","Data":"be82b833501e5118114b4aa3903fd2dff8149e9740a5eba40439c63e200b2cc0"} Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.124896 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qk4l8" event={"ID":"dd031753-0dc4-4ce4-be69-2d28f88f008b","Type":"ContainerStarted","Data":"c1f5fc8be24b6e592ef435bccc95bf6864562e15f363723d4a85b250a17e4b64"} Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.184863 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46pvl\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-kube-api-access-46pvl\") pod \"swift-storage-0\" (UID: 
\"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.185017 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.185049 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.185146 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-cache\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.185169 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-lock\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.185240 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287015 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-cache\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287066 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-lock\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287112 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287150 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46pvl\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-kube-api-access-46pvl\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287192 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287212 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.287385 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.287399 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.287452 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:10:43.787435832 +0000 UTC m=+856.331774265 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287527 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287890 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-cache\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.287954 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-lock\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.300466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.313558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46pvl\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-kube-api-access-46pvl\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.316529 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: I0202 09:10:43.796230 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.796560 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.796633 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:43 crc kubenswrapper[4747]: E0202 09:10:43.796767 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:10:44.796750527 +0000 UTC m=+857.341088960 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:44 crc kubenswrapper[4747]: I0202 09:10:44.133174 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qk4l8" event={"ID":"dd031753-0dc4-4ce4-be69-2d28f88f008b","Type":"ContainerStarted","Data":"4017961c9d3c9c26b4cf373b2e9a3699867f8bd938474db5a5bca63d01f3c3d3"} Feb 02 09:10:44 crc kubenswrapper[4747]: I0202 09:10:44.133584 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:44 crc kubenswrapper[4747]: I0202 09:10:44.153984 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-qk4l8" podStartSLOduration=3.153965447 podStartE2EDuration="3.153965447s" podCreationTimestamp="2026-02-02 09:10:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:10:44.148042809 +0000 UTC m=+856.692381242" watchObservedRunningTime="2026-02-02 09:10:44.153965447 +0000 UTC m=+856.698303880" Feb 02 09:10:44 crc kubenswrapper[4747]: I0202 09:10:44.813692 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:44 crc kubenswrapper[4747]: E0202 09:10:44.813864 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:44 crc kubenswrapper[4747]: E0202 09:10:44.814071 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:44 crc kubenswrapper[4747]: E0202 09:10:44.814134 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift 
podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:10:46.814114113 +0000 UTC m=+859.358452546 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.239503 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-d8hcv"] Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.242422 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.247453 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-f601-account-create-update-98lbw"] Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.248804 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.250589 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.278344 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f601-account-create-update-98lbw"] Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.308667 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-d8hcv"] Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.324598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n762n\" (UniqueName: \"kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.324667 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.324710 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stwbx\" (UniqueName: \"kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.324758 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.427948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.429117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n762n\" (UniqueName: \"kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.429183 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.429251 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stwbx\" (UniqueName: \"kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.429716 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.430370 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.447535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stwbx\" (UniqueName: \"kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx\") pod \"glance-f601-account-create-update-98lbw\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.448044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n762n\" (UniqueName: \"kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n\") pod \"glance-db-create-d8hcv\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.647885 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:45 crc kubenswrapper[4747]: I0202 09:10:45.659312 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.220346 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-d8hcv"] Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.258452 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-f601-account-create-update-98lbw"] Feb 02 09:10:46 crc kubenswrapper[4747]: W0202 09:10:46.261426 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d1d042f_43a4_4ca2_9fbd_259c19c488bb.slice/crio-43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05 WatchSource:0}: Error finding container 43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05: Status 404 returned error can't find the container with id 43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05 Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.864242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:46 crc kubenswrapper[4747]: E0202 09:10:46.864444 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:46 crc kubenswrapper[4747]: E0202 09:10:46.864709 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:46 crc kubenswrapper[4747]: E0202 09:10:46.864782 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:10:50.864759964 +0000 UTC m=+863.409098397 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.881328 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-bhx5q"] Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.883256 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.890230 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.907908 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-bhx5q"] Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.966393 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.966446 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vmdq\" (UniqueName: \"kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.997409 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-tzwjl"] Feb 02 09:10:46 crc kubenswrapper[4747]: I0202 09:10:46.998707 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.001744 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.001905 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.001905 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.015707 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-tzwjl"] Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.067612 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vmdq\" (UniqueName: \"kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.067682 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.067726 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4bs2\" (UniqueName: \"kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 
09:10:47.067754 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.067926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.068139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.068181 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.068211 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.068362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.069046 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.088573 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vmdq\" (UniqueName: \"kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq\") pod \"root-account-create-update-bhx5q\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.153726 4747 generic.go:334] "Generic (PLEG): container finished" podID="3d1d042f-43a4-4ca2-9fbd-259c19c488bb" containerID="b77d14a5bf0f049adab94d00d0917dcae42b3c5107d5b662f166e4e007816623" exitCode=0 Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.153791 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-f601-account-create-update-98lbw" event={"ID":"3d1d042f-43a4-4ca2-9fbd-259c19c488bb","Type":"ContainerDied","Data":"b77d14a5bf0f049adab94d00d0917dcae42b3c5107d5b662f166e4e007816623"} Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.153822 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f601-account-create-update-98lbw" event={"ID":"3d1d042f-43a4-4ca2-9fbd-259c19c488bb","Type":"ContainerStarted","Data":"43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05"} Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.156070 4747 generic.go:334] "Generic (PLEG): container finished" podID="981047ad-5ae2-4e3a-ad38-ecfa32e93664" containerID="e9651f3b3df2df63c74d2dded7b40633fa9db6d7f997db39001e13635b17845a" exitCode=0 Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.156123 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d8hcv" event={"ID":"981047ad-5ae2-4e3a-ad38-ecfa32e93664","Type":"ContainerDied","Data":"e9651f3b3df2df63c74d2dded7b40633fa9db6d7f997db39001e13635b17845a"} Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.156157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d8hcv" event={"ID":"981047ad-5ae2-4e3a-ad38-ecfa32e93664","Type":"ContainerStarted","Data":"f68105ec2a7abc6cc9f1d85095275852f7d0cad3d2233e6fbe3c0fac3d6016ee"} Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169578 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169634 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169664 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169825 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169890 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4bs2\" (UniqueName: \"kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.169978 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.170027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.170518 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.170867 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.170882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.172980 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.172995 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.175952 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.192964 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4bs2\" (UniqueName: \"kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2\") pod \"swift-ring-rebalance-tzwjl\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.209033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.338508 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.675985 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-bhx5q"] Feb 02 09:10:47 crc kubenswrapper[4747]: W0202 09:10:47.684122 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c5ee851_a38f_4080_9b81_4b9543452a5a.slice/crio-d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb WatchSource:0}: Error finding container d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb: Status 404 returned error can't find the container with id d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb Feb 02 09:10:47 crc kubenswrapper[4747]: I0202 09:10:47.775089 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-tzwjl"] Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.181733 4747 generic.go:334] "Generic (PLEG): container finished" podID="2c5ee851-a38f-4080-9b81-4b9543452a5a" containerID="d27ad240800257295ba6b49f728a7dfec10b39ed80fb0d1377f15e9eff379b5e" exitCode=0 Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.182333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bhx5q" event={"ID":"2c5ee851-a38f-4080-9b81-4b9543452a5a","Type":"ContainerDied","Data":"d27ad240800257295ba6b49f728a7dfec10b39ed80fb0d1377f15e9eff379b5e"} Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.183368 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bhx5q" event={"ID":"2c5ee851-a38f-4080-9b81-4b9543452a5a","Type":"ContainerStarted","Data":"d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb"} Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.184788 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tzwjl" event={"ID":"662dc11b-2635-4ef2-a109-900fa5c109fb","Type":"ContainerStarted","Data":"bbdbd4f7a9f861967ac36d6234f2bf8c1e3cc88d6aac2722e240466ae7fcd4ca"} Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.636210 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.642992 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.701027 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n762n\" (UniqueName: \"kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n\") pod \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.701281 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stwbx\" (UniqueName: \"kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx\") pod \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.701866 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts\") pod \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\" (UID: \"3d1d042f-43a4-4ca2-9fbd-259c19c488bb\") " Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.701908 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts\") pod \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\" (UID: \"981047ad-5ae2-4e3a-ad38-ecfa32e93664\") " Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.702652 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d1d042f-43a4-4ca2-9fbd-259c19c488bb" (UID: "3d1d042f-43a4-4ca2-9fbd-259c19c488bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.702682 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "981047ad-5ae2-4e3a-ad38-ecfa32e93664" (UID: "981047ad-5ae2-4e3a-ad38-ecfa32e93664"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.707742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n" (OuterVolumeSpecName: "kube-api-access-n762n") pod "981047ad-5ae2-4e3a-ad38-ecfa32e93664" (UID: "981047ad-5ae2-4e3a-ad38-ecfa32e93664"). InnerVolumeSpecName "kube-api-access-n762n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.707823 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx" (OuterVolumeSpecName: "kube-api-access-stwbx") pod "3d1d042f-43a4-4ca2-9fbd-259c19c488bb" (UID: "3d1d042f-43a4-4ca2-9fbd-259c19c488bb"). InnerVolumeSpecName "kube-api-access-stwbx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.803997 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stwbx\" (UniqueName: \"kubernetes.io/projected/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-kube-api-access-stwbx\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.804041 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d1d042f-43a4-4ca2-9fbd-259c19c488bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.804053 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/981047ad-5ae2-4e3a-ad38-ecfa32e93664-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:48 crc kubenswrapper[4747]: I0202 09:10:48.804067 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n762n\" (UniqueName: \"kubernetes.io/projected/981047ad-5ae2-4e3a-ad38-ecfa32e93664-kube-api-access-n762n\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.192032 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-d8hcv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.192075 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-d8hcv" event={"ID":"981047ad-5ae2-4e3a-ad38-ecfa32e93664","Type":"ContainerDied","Data":"f68105ec2a7abc6cc9f1d85095275852f7d0cad3d2233e6fbe3c0fac3d6016ee"} Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.192169 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f68105ec2a7abc6cc9f1d85095275852f7d0cad3d2233e6fbe3c0fac3d6016ee" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.193191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-f601-account-create-update-98lbw" event={"ID":"3d1d042f-43a4-4ca2-9fbd-259c19c488bb","Type":"ContainerDied","Data":"43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05"} Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.193216 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-f601-account-create-update-98lbw" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.193232 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43584ea6781c41b416b931a9d073d0f9c95dac8f3d812ec380c7abaf7ebcef05" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.528115 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-q8tlv"] Feb 02 09:10:49 crc kubenswrapper[4747]: E0202 09:10:49.528496 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="981047ad-5ae2-4e3a-ad38-ecfa32e93664" containerName="mariadb-database-create" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.528520 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="981047ad-5ae2-4e3a-ad38-ecfa32e93664" containerName="mariadb-database-create" Feb 02 09:10:49 crc kubenswrapper[4747]: E0202 09:10:49.528557 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1d042f-43a4-4ca2-9fbd-259c19c488bb" containerName="mariadb-account-create-update" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.528566 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1d042f-43a4-4ca2-9fbd-259c19c488bb" containerName="mariadb-account-create-update" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.528758 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1d042f-43a4-4ca2-9fbd-259c19c488bb" containerName="mariadb-account-create-update" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.528776 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="981047ad-5ae2-4e3a-ad38-ecfa32e93664" containerName="mariadb-database-create" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.529360 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.549517 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-q8tlv"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.620953 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmvbd\" (UniqueName: \"kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.621014 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.625817 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-3187-account-create-update-p79p5"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.626883 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.629522 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.633928 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3187-account-create-update-p79p5"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.722921 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.723067 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts\") pod \"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.723108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hcwb\" (UniqueName: \"kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb\") pod \"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.723211 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmvbd\" (UniqueName: \"kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.723836 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.741954 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmvbd\" (UniqueName: \"kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd\") pod \"keystone-db-create-q8tlv\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.825092 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts\") pod \"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.825141 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hcwb\" (UniqueName: \"kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb\") pod 
\"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.826449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts\") pod \"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.843972 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hcwb\" (UniqueName: \"kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb\") pod \"keystone-3187-account-create-update-p79p5\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.856566 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.886836 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-4qhvd"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.888141 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.899818 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4qhvd"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.926753 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4f8q\" (UniqueName: \"kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.926866 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.958551 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.983366 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-44ee-account-create-update-zqfb7"] Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.984540 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.988697 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 02 09:10:49 crc kubenswrapper[4747]: I0202 09:10:49.991406 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-44ee-account-create-update-zqfb7"] Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.028045 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.028167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.028261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4f8q\" (UniqueName: \"kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.028296 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pflg4\" (UniqueName: \"kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.029236 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.045413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4f8q\" (UniqueName: \"kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q\") pod \"placement-db-create-4qhvd\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.133225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.133306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pflg4\" (UniqueName: 
\"kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.134118 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.153253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pflg4\" (UniqueName: \"kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4\") pod \"placement-44ee-account-create-update-zqfb7\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.216584 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.302464 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.368451 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.442612 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts\") pod \"2c5ee851-a38f-4080-9b81-4b9543452a5a\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.442733 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vmdq\" (UniqueName: \"kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq\") pod \"2c5ee851-a38f-4080-9b81-4b9543452a5a\" (UID: \"2c5ee851-a38f-4080-9b81-4b9543452a5a\") " Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.443559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c5ee851-a38f-4080-9b81-4b9543452a5a" (UID: "2c5ee851-a38f-4080-9b81-4b9543452a5a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.447546 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq" (OuterVolumeSpecName: "kube-api-access-6vmdq") pod "2c5ee851-a38f-4080-9b81-4b9543452a5a" (UID: "2c5ee851-a38f-4080-9b81-4b9543452a5a"). InnerVolumeSpecName "kube-api-access-6vmdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.459306 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-5xc45"] Feb 02 09:10:50 crc kubenswrapper[4747]: E0202 09:10:50.459696 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5ee851-a38f-4080-9b81-4b9543452a5a" containerName="mariadb-account-create-update" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.459718 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5ee851-a38f-4080-9b81-4b9543452a5a" containerName="mariadb-account-create-update" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.459964 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c5ee851-a38f-4080-9b81-4b9543452a5a" containerName="mariadb-account-create-update" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.460582 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.462875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.463036 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2v6s8" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.466393 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5xc45"] Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.544839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.544905 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.545020 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.545056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rctcr\" (UniqueName: \"kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.545144 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c5ee851-a38f-4080-9b81-4b9543452a5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.545160 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vmdq\" (UniqueName: 
\"kubernetes.io/projected/2c5ee851-a38f-4080-9b81-4b9543452a5a-kube-api-access-6vmdq\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.646163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.646210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.646280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.646310 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rctcr\" (UniqueName: \"kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.650616 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.651142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.658240 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.661179 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rctcr\" (UniqueName: \"kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr\") pod \"glance-db-sync-5xc45\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.777948 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5xc45" Feb 02 09:10:50 crc kubenswrapper[4747]: I0202 09:10:50.950706 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:50 crc kubenswrapper[4747]: E0202 09:10:50.950872 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:50 crc kubenswrapper[4747]: E0202 09:10:50.950894 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:50 crc kubenswrapper[4747]: E0202 09:10:50.950956 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:10:58.950924589 +0000 UTC m=+871.495263012 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:51 crc kubenswrapper[4747]: I0202 09:10:51.220356 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-bhx5q" event={"ID":"2c5ee851-a38f-4080-9b81-4b9543452a5a","Type":"ContainerDied","Data":"d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb"} Feb 02 09:10:51 crc kubenswrapper[4747]: I0202 09:10:51.220389 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9e41b1f4c025a6776140ecfd984f0fd6570c01cb789654c42e91296d67008fb" Feb 02 09:10:51 crc kubenswrapper[4747]: I0202 09:10:51.220444 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-bhx5q" Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.058896 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3187-account-create-update-p79p5"] Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.189782 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4qhvd"] Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.242366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3187-account-create-update-p79p5" event={"ID":"168afe57-1b0e-4ace-8565-c535f289fdfe","Type":"ContainerStarted","Data":"06afe3c387dd68a26adf536f96c6055a6f59ed23ce1d3f2204ab6004e9856811"} Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.248733 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4qhvd" event={"ID":"e16f96e6-5142-4edf-81bb-0e8eb79728b4","Type":"ContainerStarted","Data":"b2df9aa2ec4c527e72efa077bdbf0c1204c8ea792cbbb63fc5118e9e80f2d52d"} Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.250472 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-q8tlv"] Feb 02 09:10:52 crc kubenswrapper[4747]: W0202 09:10:52.252594 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod099ed0a4_89ce_418e_88e5_e93b2831cf94.slice/crio-68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374 WatchSource:0}: Error finding container 68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374: Status 404 returned error can't find the container with id 68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374 Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.252927 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tzwjl" event={"ID":"662dc11b-2635-4ef2-a109-900fa5c109fb","Type":"ContainerStarted","Data":"f7c8953830bb936f35b9689c6ba45ba536b885f6fa9c859bd4c77ce2058532dd"} Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.253153 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.313181 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-44ee-account-create-update-zqfb7"] Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.322086 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-tzwjl" podStartSLOduration=2.496706219 podStartE2EDuration="6.322061889s" podCreationTimestamp="2026-02-02 09:10:46 +0000 UTC" firstStartedPulling="2026-02-02 09:10:47.785185433 +0000 UTC m=+860.329523886" lastFinishedPulling="2026-02-02 09:10:51.610541123 +0000 UTC m=+864.154879556" observedRunningTime="2026-02-02 09:10:52.277095598 +0000 UTC m=+864.821434041" watchObservedRunningTime="2026-02-02 09:10:52.322061889 +0000 UTC m=+864.866400312" Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.368685 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.369542 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="dnsmasq-dns" containerID="cri-o://9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375" 
gracePeriod=10 Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.373430 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5xc45"] Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.953785 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.995272 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc\") pod \"12c98ada-7b2c-458c-b998-998febe86c53\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.995340 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb\") pod \"12c98ada-7b2c-458c-b998-998febe86c53\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.995481 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4frh\" (UniqueName: \"kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh\") pod \"12c98ada-7b2c-458c-b998-998febe86c53\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.995565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config\") pod \"12c98ada-7b2c-458c-b998-998febe86c53\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " Feb 02 09:10:52 crc kubenswrapper[4747]: I0202 09:10:52.995614 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb\") pod \"12c98ada-7b2c-458c-b998-998febe86c53\" (UID: \"12c98ada-7b2c-458c-b998-998febe86c53\") " Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.002910 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh" (OuterVolumeSpecName: "kube-api-access-g4frh") pod "12c98ada-7b2c-458c-b998-998febe86c53" (UID: "12c98ada-7b2c-458c-b998-998febe86c53"). InnerVolumeSpecName "kube-api-access-g4frh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.065150 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "12c98ada-7b2c-458c-b998-998febe86c53" (UID: "12c98ada-7b2c-458c-b998-998febe86c53"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.069875 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "12c98ada-7b2c-458c-b998-998febe86c53" (UID: "12c98ada-7b2c-458c-b998-998febe86c53"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.074144 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "12c98ada-7b2c-458c-b998-998febe86c53" (UID: "12c98ada-7b2c-458c-b998-998febe86c53"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.077135 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config" (OuterVolumeSpecName: "config") pod "12c98ada-7b2c-458c-b998-998febe86c53" (UID: "12c98ada-7b2c-458c-b998-998febe86c53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.097468 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.097506 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.097518 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.097531 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/12c98ada-7b2c-458c-b998-998febe86c53-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.097543 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4frh\" (UniqueName: \"kubernetes.io/projected/12c98ada-7b2c-458c-b998-998febe86c53-kube-api-access-g4frh\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.259917 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-bhx5q"] Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.266260 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-bhx5q"] Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.267550 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5xc45" event={"ID":"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885","Type":"ContainerStarted","Data":"52911384e1f3db351d9624d4bf25c10607ce9053b1604f8b3e7e86486a8f6c73"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.269336 4747 generic.go:334] "Generic (PLEG): container finished" podID="168afe57-1b0e-4ace-8565-c535f289fdfe" containerID="91553e331e304db14bb31250696b4c88689b059278cfc2f7447c55d73d66936f" exitCode=0 Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.269919 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3187-account-create-update-p79p5" event={"ID":"168afe57-1b0e-4ace-8565-c535f289fdfe","Type":"ContainerDied","Data":"91553e331e304db14bb31250696b4c88689b059278cfc2f7447c55d73d66936f"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.272340 4747 generic.go:334] 
"Generic (PLEG): container finished" podID="e16f96e6-5142-4edf-81bb-0e8eb79728b4" containerID="ea1c9906bef1bf8dc273d88b385e2a017547e031a15b9d6b4e2051a29ef0997e" exitCode=0 Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.272501 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4qhvd" event={"ID":"e16f96e6-5142-4edf-81bb-0e8eb79728b4","Type":"ContainerDied","Data":"ea1c9906bef1bf8dc273d88b385e2a017547e031a15b9d6b4e2051a29ef0997e"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.274363 4747 generic.go:334] "Generic (PLEG): container finished" podID="099ed0a4-89ce-418e-88e5-e93b2831cf94" containerID="d3fd3f7212c6604ac79285e6831b88f56e04ec79bab1c27c4699713c115b5a3d" exitCode=0 Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.274698 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8tlv" event={"ID":"099ed0a4-89ce-418e-88e5-e93b2831cf94","Type":"ContainerDied","Data":"d3fd3f7212c6604ac79285e6831b88f56e04ec79bab1c27c4699713c115b5a3d"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.274811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8tlv" event={"ID":"099ed0a4-89ce-418e-88e5-e93b2831cf94","Type":"ContainerStarted","Data":"68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.289069 4747 generic.go:334] "Generic (PLEG): container finished" podID="12c98ada-7b2c-458c-b998-998febe86c53" containerID="9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375" exitCode=0 Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.289499 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerDied","Data":"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.289538 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" event={"ID":"12c98ada-7b2c-458c-b998-998febe86c53","Type":"ContainerDied","Data":"b9c4aa10cd2fab1ce40fc5759341dcc07974b5b04de10097070d4065814004ad"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.289559 4747 scope.go:117] "RemoveContainer" containerID="9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.290012 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-khjv9" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.296288 4747 generic.go:334] "Generic (PLEG): container finished" podID="3684c04e-f715-4281-b323-5d0097ddded9" containerID="d72595de54c883632e35d328836897e3becab0dfb01220d4d609d9a29d04dcee" exitCode=0 Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.297591 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-44ee-account-create-update-zqfb7" event={"ID":"3684c04e-f715-4281-b323-5d0097ddded9","Type":"ContainerDied","Data":"d72595de54c883632e35d328836897e3becab0dfb01220d4d609d9a29d04dcee"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.297759 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-44ee-account-create-update-zqfb7" event={"ID":"3684c04e-f715-4281-b323-5d0097ddded9","Type":"ContainerStarted","Data":"8c528db3954051ea0efc8bcf0142de460a440af0406865004cdf238b25a35f39"} Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.324090 4747 scope.go:117] "RemoveContainer" containerID="960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.346032 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.347995 4747 scope.go:117] "RemoveContainer" containerID="9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375" Feb 02 09:10:53 crc kubenswrapper[4747]: E0202 09:10:53.348410 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375\": container with ID starting with 9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375 not found: ID does not exist" containerID="9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.348478 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375"} err="failed to get container status \"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375\": rpc error: code = NotFound desc = could not find container \"9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375\": container with ID starting with 9e0ea2a63655d5027f73d806bcfcdfcf25b1b05a05b752874a7bc4f082509375 not found: ID does not exist" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.348518 4747 scope.go:117] "RemoveContainer" containerID="960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e" Feb 02 09:10:53 crc kubenswrapper[4747]: E0202 09:10:53.349068 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e\": container with ID starting with 960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e not found: ID does not exist" containerID="960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.349111 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e"} err="failed to get container status \"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e\": rpc error: 
code = NotFound desc = could not find container \"960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e\": container with ID starting with 960bdfc570cfd76c3dded1a2a38d74be6554b8941fd0c1fa911c85fc6930cd5e not found: ID does not exist" Feb 02 09:10:53 crc kubenswrapper[4747]: I0202 09:10:53.355778 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-khjv9"] Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.354860 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12c98ada-7b2c-458c-b998-998febe86c53" path="/var/lib/kubelet/pods/12c98ada-7b2c-458c-b998-998febe86c53/volumes" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.355437 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c5ee851-a38f-4080-9b81-4b9543452a5a" path="/var/lib/kubelet/pods/2c5ee851-a38f-4080-9b81-4b9543452a5a/volumes" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.691360 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.730448 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4f8q\" (UniqueName: \"kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q\") pod \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.730537 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts\") pod \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\" (UID: \"e16f96e6-5142-4edf-81bb-0e8eb79728b4\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.731301 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e16f96e6-5142-4edf-81bb-0e8eb79728b4" (UID: "e16f96e6-5142-4edf-81bb-0e8eb79728b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.737059 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q" (OuterVolumeSpecName: "kube-api-access-v4f8q") pod "e16f96e6-5142-4edf-81bb-0e8eb79728b4" (UID: "e16f96e6-5142-4edf-81bb-0e8eb79728b4"). InnerVolumeSpecName "kube-api-access-v4f8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.825595 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.831898 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4f8q\" (UniqueName: \"kubernetes.io/projected/e16f96e6-5142-4edf-81bb-0e8eb79728b4-kube-api-access-v4f8q\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.831950 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e16f96e6-5142-4edf-81bb-0e8eb79728b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.832517 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.846321 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.932794 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pflg4\" (UniqueName: \"kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4\") pod \"3684c04e-f715-4281-b323-5d0097ddded9\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.932894 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts\") pod \"099ed0a4-89ce-418e-88e5-e93b2831cf94\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.932996 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hcwb\" (UniqueName: \"kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb\") pod \"168afe57-1b0e-4ace-8565-c535f289fdfe\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.933488 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "099ed0a4-89ce-418e-88e5-e93b2831cf94" (UID: "099ed0a4-89ce-418e-88e5-e93b2831cf94"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.933522 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts\") pod \"3684c04e-f715-4281-b323-5d0097ddded9\" (UID: \"3684c04e-f715-4281-b323-5d0097ddded9\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.933614 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmvbd\" (UniqueName: \"kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd\") pod \"099ed0a4-89ce-418e-88e5-e93b2831cf94\" (UID: \"099ed0a4-89ce-418e-88e5-e93b2831cf94\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.933695 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts\") pod \"168afe57-1b0e-4ace-8565-c535f289fdfe\" (UID: \"168afe57-1b0e-4ace-8565-c535f289fdfe\") " Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.933989 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3684c04e-f715-4281-b323-5d0097ddded9" (UID: "3684c04e-f715-4281-b323-5d0097ddded9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.934667 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099ed0a4-89ce-418e-88e5-e93b2831cf94-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.934694 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3684c04e-f715-4281-b323-5d0097ddded9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.934713 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "168afe57-1b0e-4ace-8565-c535f289fdfe" (UID: "168afe57-1b0e-4ace-8565-c535f289fdfe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.936297 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb" (OuterVolumeSpecName: "kube-api-access-7hcwb") pod "168afe57-1b0e-4ace-8565-c535f289fdfe" (UID: "168afe57-1b0e-4ace-8565-c535f289fdfe"). InnerVolumeSpecName "kube-api-access-7hcwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.937888 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd" (OuterVolumeSpecName: "kube-api-access-nmvbd") pod "099ed0a4-89ce-418e-88e5-e93b2831cf94" (UID: "099ed0a4-89ce-418e-88e5-e93b2831cf94"). InnerVolumeSpecName "kube-api-access-nmvbd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:54 crc kubenswrapper[4747]: I0202 09:10:54.942878 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4" (OuterVolumeSpecName: "kube-api-access-pflg4") pod "3684c04e-f715-4281-b323-5d0097ddded9" (UID: "3684c04e-f715-4281-b323-5d0097ddded9"). InnerVolumeSpecName "kube-api-access-pflg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.036427 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pflg4\" (UniqueName: \"kubernetes.io/projected/3684c04e-f715-4281-b323-5d0097ddded9-kube-api-access-pflg4\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.036556 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hcwb\" (UniqueName: \"kubernetes.io/projected/168afe57-1b0e-4ace-8565-c535f289fdfe-kube-api-access-7hcwb\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.036570 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmvbd\" (UniqueName: \"kubernetes.io/projected/099ed0a4-89ce-418e-88e5-e93b2831cf94-kube-api-access-nmvbd\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.036583 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/168afe57-1b0e-4ace-8565-c535f289fdfe-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.311769 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-q8tlv" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.311771 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-q8tlv" event={"ID":"099ed0a4-89ce-418e-88e5-e93b2831cf94","Type":"ContainerDied","Data":"68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374"} Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.311921 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68325351a1895f0693fde4efa32eefaca74e09409ddf0c85e7db81db09e47374" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.314425 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-44ee-account-create-update-zqfb7" event={"ID":"3684c04e-f715-4281-b323-5d0097ddded9","Type":"ContainerDied","Data":"8c528db3954051ea0efc8bcf0142de460a440af0406865004cdf238b25a35f39"} Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.314449 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-44ee-account-create-update-zqfb7" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.314464 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c528db3954051ea0efc8bcf0142de460a440af0406865004cdf238b25a35f39" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.316355 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3187-account-create-update-p79p5" event={"ID":"168afe57-1b0e-4ace-8565-c535f289fdfe","Type":"ContainerDied","Data":"06afe3c387dd68a26adf536f96c6055a6f59ed23ce1d3f2204ab6004e9856811"} Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.316392 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06afe3c387dd68a26adf536f96c6055a6f59ed23ce1d3f2204ab6004e9856811" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.316402 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3187-account-create-update-p79p5" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.317501 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4qhvd" event={"ID":"e16f96e6-5142-4edf-81bb-0e8eb79728b4","Type":"ContainerDied","Data":"b2df9aa2ec4c527e72efa077bdbf0c1204c8ea792cbbb63fc5118e9e80f2d52d"} Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.317526 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2df9aa2ec4c527e72efa077bdbf0c1204c8ea792cbbb63fc5118e9e80f2d52d" Feb 02 09:10:55 crc kubenswrapper[4747]: I0202 09:10:55.317619 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4qhvd" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.269426 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-pggdq"] Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270360 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="168afe57-1b0e-4ace-8565-c535f289fdfe" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270377 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="168afe57-1b0e-4ace-8565-c535f289fdfe" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270396 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3684c04e-f715-4281-b323-5d0097ddded9" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270405 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3684c04e-f715-4281-b323-5d0097ddded9" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270419 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="dnsmasq-dns" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270429 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="dnsmasq-dns" Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270461 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16f96e6-5142-4edf-81bb-0e8eb79728b4" containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270468 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16f96e6-5142-4edf-81bb-0e8eb79728b4" 
containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270479 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="099ed0a4-89ce-418e-88e5-e93b2831cf94" containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270486 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="099ed0a4-89ce-418e-88e5-e93b2831cf94" containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: E0202 09:10:58.270502 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="init" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270509 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="init" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270665 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="099ed0a4-89ce-418e-88e5-e93b2831cf94" containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270676 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="168afe57-1b0e-4ace-8565-c535f289fdfe" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270689 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3684c04e-f715-4281-b323-5d0097ddded9" containerName="mariadb-account-create-update" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270699 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="12c98ada-7b2c-458c-b998-998febe86c53" containerName="dnsmasq-dns" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.270708 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16f96e6-5142-4edf-81bb-0e8eb79728b4" containerName="mariadb-database-create" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.271236 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.276688 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-pggdq"] Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.278739 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.394898 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.395016 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5dqz\" (UniqueName: \"kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.496874 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5dqz\" (UniqueName: \"kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.497086 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.497948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.520275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5dqz\" (UniqueName: \"kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz\") pod \"root-account-create-update-pggdq\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.585903 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-pggdq" Feb 02 09:10:58 crc kubenswrapper[4747]: I0202 09:10:58.654963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 02 09:10:59 crc kubenswrapper[4747]: I0202 09:10:59.005646 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:10:59 crc kubenswrapper[4747]: E0202 09:10:59.005833 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 09:10:59 crc kubenswrapper[4747]: E0202 09:10:59.005850 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 09:10:59 crc kubenswrapper[4747]: E0202 09:10:59.005901 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift podName:30a3d44f-2ad1-4d00-824e-1e1cdaa048ad nodeName:}" failed. No retries permitted until 2026-02-02 09:11:15.005885729 +0000 UTC m=+887.550224162 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift") pod "swift-storage-0" (UID: "30a3d44f-2ad1-4d00-824e-1e1cdaa048ad") : configmap "swift-ring-files" not found Feb 02 09:10:59 crc kubenswrapper[4747]: I0202 09:10:59.352399 4747 generic.go:334] "Generic (PLEG): container finished" podID="662dc11b-2635-4ef2-a109-900fa5c109fb" containerID="f7c8953830bb936f35b9689c6ba45ba536b885f6fa9c859bd4c77ce2058532dd" exitCode=0 Feb 02 09:10:59 crc kubenswrapper[4747]: I0202 09:10:59.352461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tzwjl" event={"ID":"662dc11b-2635-4ef2-a109-900fa5c109fb","Type":"ContainerDied","Data":"f7c8953830bb936f35b9689c6ba45ba536b885f6fa9c859bd4c77ce2058532dd"} Feb 02 09:11:00 crc kubenswrapper[4747]: I0202 09:11:00.335745 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-vhb5g" podUID="6054cae9-07d3-4de6-ad28-2be1334c85c5" containerName="ovn-controller" probeResult="failure" output=< Feb 02 09:11:00 crc kubenswrapper[4747]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 02 09:11:00 crc kubenswrapper[4747]: > Feb 02 09:11:02 crc kubenswrapper[4747]: I0202 09:11:02.375525 4747 generic.go:334] "Generic (PLEG): container finished" podID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerID="8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085" exitCode=0 Feb 02 09:11:02 crc kubenswrapper[4747]: I0202 09:11:02.375578 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerDied","Data":"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085"} Feb 02 09:11:02 crc kubenswrapper[4747]: I0202 09:11:02.380568 4747 generic.go:334] "Generic (PLEG): container finished" podID="c9627032-9b68-4e48-8372-fabd9de3d289" containerID="2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40" exitCode=0 Feb 02 09:11:02 crc kubenswrapper[4747]: I0202 09:11:02.380648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerDied","Data":"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40"} Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.087571 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201132 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201732 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201787 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201816 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201870 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4bs2\" (UniqueName: \"kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.201948 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.202041 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift\") pod \"662dc11b-2635-4ef2-a109-900fa5c109fb\" (UID: \"662dc11b-2635-4ef2-a109-900fa5c109fb\") " Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.203347 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.203866 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.209207 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2" (OuterVolumeSpecName: "kube-api-access-l4bs2") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "kube-api-access-l4bs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.212173 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.225374 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts" (OuterVolumeSpecName: "scripts") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.225958 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.227171 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "662dc11b-2635-4ef2-a109-900fa5c109fb" (UID: "662dc11b-2635-4ef2-a109-900fa5c109fb"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304442 4747 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304743 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4bs2\" (UniqueName: \"kubernetes.io/projected/662dc11b-2635-4ef2-a109-900fa5c109fb-kube-api-access-l4bs2\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304758 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/662dc11b-2635-4ef2-a109-900fa5c109fb-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304767 4747 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/662dc11b-2635-4ef2-a109-900fa5c109fb-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304777 4747 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304784 4747 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.304795 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/662dc11b-2635-4ef2-a109-900fa5c109fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.374187 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-pggdq"] Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.395543 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerStarted","Data":"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33"} Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.396310 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.398391 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerStarted","Data":"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130"} Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.398812 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.400718 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-pggdq" event={"ID":"ea9cec41-47c9-4a62-a268-aa2f8e9996b0","Type":"ContainerStarted","Data":"3662a2321ff9531daf817b2fc89244f9b2ddbe0ebcf1246fe88d77c4ceb7261d"} Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.402085 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tzwjl" 
event={"ID":"662dc11b-2635-4ef2-a109-900fa5c109fb","Type":"ContainerDied","Data":"bbdbd4f7a9f861967ac36d6234f2bf8c1e3cc88d6aac2722e240466ae7fcd4ca"} Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.402104 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbdbd4f7a9f861967ac36d6234f2bf8c1e3cc88d6aac2722e240466ae7fcd4ca" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.402150 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tzwjl" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.427101 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.15485212 podStartE2EDuration="59.427081124s" podCreationTimestamp="2026-02-02 09:10:05 +0000 UTC" firstStartedPulling="2026-02-02 09:10:17.940849391 +0000 UTC m=+830.485187834" lastFinishedPulling="2026-02-02 09:10:27.213078365 +0000 UTC m=+839.757416838" observedRunningTime="2026-02-02 09:11:04.421849144 +0000 UTC m=+876.966187587" watchObservedRunningTime="2026-02-02 09:11:04.427081124 +0000 UTC m=+876.971419567" Feb 02 09:11:04 crc kubenswrapper[4747]: I0202 09:11:04.450071 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=51.463717236 podStartE2EDuration="59.450055607s" podCreationTimestamp="2026-02-02 09:10:05 +0000 UTC" firstStartedPulling="2026-02-02 09:10:18.655184342 +0000 UTC m=+831.199522775" lastFinishedPulling="2026-02-02 09:10:26.641522703 +0000 UTC m=+839.185861146" observedRunningTime="2026-02-02 09:11:04.444607791 +0000 UTC m=+876.988946234" watchObservedRunningTime="2026-02-02 09:11:04.450055607 +0000 UTC m=+876.994394040" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.333656 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-vhb5g" podUID="6054cae9-07d3-4de6-ad28-2be1334c85c5" containerName="ovn-controller" probeResult="failure" output=< Feb 02 09:11:05 crc kubenswrapper[4747]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 02 09:11:05 crc kubenswrapper[4747]: > Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.370395 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.388961 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bfdw2" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.417565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5xc45" event={"ID":"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885","Type":"ContainerStarted","Data":"22329a8b2f1604e91d5059f78607d8b677dcd17343278988e9b33d80ab195ae1"} Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.421353 4747 generic.go:334] "Generic (PLEG): container finished" podID="ea9cec41-47c9-4a62-a268-aa2f8e9996b0" containerID="5e44d2b3d6150edd516f232cd1e31d1d3dbde347305d39448096bffa8f04f938" exitCode=0 Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.421381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-pggdq" event={"ID":"ea9cec41-47c9-4a62-a268-aa2f8e9996b0","Type":"ContainerDied","Data":"5e44d2b3d6150edd516f232cd1e31d1d3dbde347305d39448096bffa8f04f938"} Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.442928 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-5xc45" podStartSLOduration=3.8477838970000002 podStartE2EDuration="15.442908353s" podCreationTimestamp="2026-02-02 09:10:50 +0000 UTC" firstStartedPulling="2026-02-02 09:10:52.409682435 +0000 UTC m=+864.954020868" lastFinishedPulling="2026-02-02 09:11:04.004806891 +0000 UTC m=+876.549145324" observedRunningTime="2026-02-02 09:11:05.43439319 +0000 UTC m=+877.978731623" watchObservedRunningTime="2026-02-02 09:11:05.442908353 +0000 UTC m=+877.987246786" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.615232 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-vhb5g-config-7qvgw"] Feb 02 09:11:05 crc kubenswrapper[4747]: E0202 09:11:05.615622 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="662dc11b-2635-4ef2-a109-900fa5c109fb" containerName="swift-ring-rebalance" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.615645 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="662dc11b-2635-4ef2-a109-900fa5c109fb" containerName="swift-ring-rebalance" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.615839 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="662dc11b-2635-4ef2-a109-900fa5c109fb" containerName="swift-ring-rebalance" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.616492 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.618594 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.626713 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vhb5g-config-7qvgw"] Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.726351 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc8wl\" (UniqueName: \"kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.726546 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.726686 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.726782 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc 
kubenswrapper[4747]: I0202 09:11:05.726833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.726999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.828827 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.829449 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.829374 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.829608 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.830246 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.831793 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.831923 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc8wl\" (UniqueName: \"kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " 
pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.832818 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.832914 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.832972 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.832926 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.850070 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc8wl\" (UniqueName: \"kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl\") pod \"ovn-controller-vhb5g-config-7qvgw\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:05 crc kubenswrapper[4747]: I0202 09:11:05.935900 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.388459 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-vhb5g-config-7qvgw"] Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.429592 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vhb5g-config-7qvgw" event={"ID":"1281ed52-5a34-443d-b7c2-6293c3141a6d","Type":"ContainerStarted","Data":"16f3c237c3c1be4958e4262fe040406e9d0b96e6488ab2b728be99a256bc212a"} Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.703011 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-pggdq" Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.857403 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts\") pod \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.857476 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5dqz\" (UniqueName: \"kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz\") pod \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\" (UID: \"ea9cec41-47c9-4a62-a268-aa2f8e9996b0\") " Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.858400 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea9cec41-47c9-4a62-a268-aa2f8e9996b0" (UID: "ea9cec41-47c9-4a62-a268-aa2f8e9996b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.863438 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz" (OuterVolumeSpecName: "kube-api-access-l5dqz") pod "ea9cec41-47c9-4a62-a268-aa2f8e9996b0" (UID: "ea9cec41-47c9-4a62-a268-aa2f8e9996b0"). InnerVolumeSpecName "kube-api-access-l5dqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.960120 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:06 crc kubenswrapper[4747]: I0202 09:11:06.960152 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5dqz\" (UniqueName: \"kubernetes.io/projected/ea9cec41-47c9-4a62-a268-aa2f8e9996b0-kube-api-access-l5dqz\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:07 crc kubenswrapper[4747]: I0202 09:11:07.438919 4747 generic.go:334] "Generic (PLEG): container finished" podID="1281ed52-5a34-443d-b7c2-6293c3141a6d" containerID="79285de4a6d3b493c36dbf68549cb562a6eb49ec7d8c601bc5e254cf272f1d7b" exitCode=0 Feb 02 09:11:07 crc kubenswrapper[4747]: I0202 09:11:07.439068 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vhb5g-config-7qvgw" event={"ID":"1281ed52-5a34-443d-b7c2-6293c3141a6d","Type":"ContainerDied","Data":"79285de4a6d3b493c36dbf68549cb562a6eb49ec7d8c601bc5e254cf272f1d7b"} Feb 02 09:11:07 crc kubenswrapper[4747]: I0202 09:11:07.441081 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-pggdq" event={"ID":"ea9cec41-47c9-4a62-a268-aa2f8e9996b0","Type":"ContainerDied","Data":"3662a2321ff9531daf817b2fc89244f9b2ddbe0ebcf1246fe88d77c4ceb7261d"} Feb 02 09:11:07 crc kubenswrapper[4747]: I0202 09:11:07.441116 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3662a2321ff9531daf817b2fc89244f9b2ddbe0ebcf1246fe88d77c4ceb7261d" Feb 02 09:11:07 crc kubenswrapper[4747]: I0202 09:11:07.441152 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-pggdq" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.788619 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790235 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790351 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc8wl\" (UniqueName: \"kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790406 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790351 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790435 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.790638 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run" (OuterVolumeSpecName: "var-run") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.791130 4747 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.791189 4747 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.791231 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.796604 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl" (OuterVolumeSpecName: "kube-api-access-wc8wl") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "kube-api-access-wc8wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.891907 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.891997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts\") pod \"1281ed52-5a34-443d-b7c2-6293c3141a6d\" (UID: \"1281ed52-5a34-443d-b7c2-6293c3141a6d\") " Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.892070 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.892604 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc8wl\" (UniqueName: \"kubernetes.io/projected/1281ed52-5a34-443d-b7c2-6293c3141a6d-kube-api-access-wc8wl\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.892629 4747 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.892638 4747 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1281ed52-5a34-443d-b7c2-6293c3141a6d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.893454 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts" (OuterVolumeSpecName: "scripts") pod "1281ed52-5a34-443d-b7c2-6293c3141a6d" (UID: "1281ed52-5a34-443d-b7c2-6293c3141a6d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:08 crc kubenswrapper[4747]: I0202 09:11:08.994063 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1281ed52-5a34-443d-b7c2-6293c3141a6d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:09 crc kubenswrapper[4747]: I0202 09:11:09.459677 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-vhb5g-config-7qvgw" event={"ID":"1281ed52-5a34-443d-b7c2-6293c3141a6d","Type":"ContainerDied","Data":"16f3c237c3c1be4958e4262fe040406e9d0b96e6488ab2b728be99a256bc212a"} Feb 02 09:11:09 crc kubenswrapper[4747]: I0202 09:11:09.459721 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16f3c237c3c1be4958e4262fe040406e9d0b96e6488ab2b728be99a256bc212a" Feb 02 09:11:09 crc kubenswrapper[4747]: I0202 09:11:09.459785 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-vhb5g-config-7qvgw" Feb 02 09:11:09 crc kubenswrapper[4747]: E0202 09:11:09.553683 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1281ed52_5a34_443d_b7c2_6293c3141a6d.slice/crio-16f3c237c3c1be4958e4262fe040406e9d0b96e6488ab2b728be99a256bc212a\": RecentStats: unable to find data in memory cache]" Feb 02 09:11:09 crc kubenswrapper[4747]: I0202 09:11:09.905011 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-vhb5g-config-7qvgw"] Feb 02 09:11:09 crc kubenswrapper[4747]: I0202 09:11:09.912466 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-vhb5g-config-7qvgw"] Feb 02 09:11:10 crc kubenswrapper[4747]: I0202 09:11:10.327579 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-vhb5g" Feb 02 09:11:10 crc kubenswrapper[4747]: I0202 09:11:10.350527 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1281ed52-5a34-443d-b7c2-6293c3141a6d" path="/var/lib/kubelet/pods/1281ed52-5a34-443d-b7c2-6293c3141a6d/volumes" Feb 02 09:11:10 crc kubenswrapper[4747]: I0202 09:11:10.469350 4747 generic.go:334] "Generic (PLEG): container finished" podID="d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" containerID="22329a8b2f1604e91d5059f78607d8b677dcd17343278988e9b33d80ab195ae1" exitCode=0 Feb 02 09:11:10 crc kubenswrapper[4747]: I0202 09:11:10.469404 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5xc45" event={"ID":"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885","Type":"ContainerDied","Data":"22329a8b2f1604e91d5059f78607d8b677dcd17343278988e9b33d80ab195ae1"} Feb 02 09:11:11 crc kubenswrapper[4747]: I0202 09:11:11.868214 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5xc45" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.034499 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data\") pod \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.034569 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rctcr\" (UniqueName: \"kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr\") pod \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.034604 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data\") pod \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.034620 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle\") pod \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\" (UID: \"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885\") " Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.040424 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" (UID: "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.040724 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr" (OuterVolumeSpecName: "kube-api-access-rctcr") pod "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" (UID: "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885"). InnerVolumeSpecName "kube-api-access-rctcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.057856 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" (UID: "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.088267 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data" (OuterVolumeSpecName: "config-data") pod "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" (UID: "d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.137361 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.137402 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rctcr\" (UniqueName: \"kubernetes.io/projected/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-kube-api-access-rctcr\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.137420 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.137431 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.486381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5xc45" event={"ID":"d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885","Type":"ContainerDied","Data":"52911384e1f3db351d9624d4bf25c10607ce9053b1604f8b3e7e86486a8f6c73"} Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.486624 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52911384e1f3db351d9624d4bf25c10607ce9053b1604f8b3e7e86486a8f6c73" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.486440 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5xc45" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868168 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:11:12 crc kubenswrapper[4747]: E0202 09:11:12.868469 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea9cec41-47c9-4a62-a268-aa2f8e9996b0" containerName="mariadb-account-create-update" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868486 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea9cec41-47c9-4a62-a268-aa2f8e9996b0" containerName="mariadb-account-create-update" Feb 02 09:11:12 crc kubenswrapper[4747]: E0202 09:11:12.868499 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1281ed52-5a34-443d-b7c2-6293c3141a6d" containerName="ovn-config" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868506 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1281ed52-5a34-443d-b7c2-6293c3141a6d" containerName="ovn-config" Feb 02 09:11:12 crc kubenswrapper[4747]: E0202 09:11:12.868525 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" containerName="glance-db-sync" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868531 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" containerName="glance-db-sync" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868669 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" containerName="glance-db-sync" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868683 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1281ed52-5a34-443d-b7c2-6293c3141a6d" containerName="ovn-config" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.868695 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea9cec41-47c9-4a62-a268-aa2f8e9996b0" containerName="mariadb-account-create-update" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.871150 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.898782 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.955421 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5k9r\" (UniqueName: \"kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.955461 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.955486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.955599 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:12 crc kubenswrapper[4747]: I0202 09:11:12.955622 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.057757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.057811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.057871 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5k9r\" (UniqueName: \"kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.057897 4747 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.057926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.058773 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.058853 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.059295 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.059318 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.079977 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5k9r\" (UniqueName: \"kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r\") pod \"dnsmasq-dns-5b946c75cc-c74xs\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.186185 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:13 crc kubenswrapper[4747]: I0202 09:11:13.675795 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:11:14 crc kubenswrapper[4747]: I0202 09:11:14.509592 4747 generic.go:334] "Generic (PLEG): container finished" podID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerID="5c11dd1d7a6bec2e5eaa3f10427288deb78c985e5e505ffb3ff94112c934e436" exitCode=0 Feb 02 09:11:14 crc kubenswrapper[4747]: I0202 09:11:14.509650 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" event={"ID":"6ad6076b-9427-4d7c-8219-357cd2e45b4b","Type":"ContainerDied","Data":"5c11dd1d7a6bec2e5eaa3f10427288deb78c985e5e505ffb3ff94112c934e436"} Feb 02 09:11:14 crc kubenswrapper[4747]: I0202 09:11:14.509906 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" event={"ID":"6ad6076b-9427-4d7c-8219-357cd2e45b4b","Type":"ContainerStarted","Data":"a0ea834c9eda4dce074933060422e2253230de3d33d06c14c3315be2f5b9045e"} Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.096261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.101904 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30a3d44f-2ad1-4d00-824e-1e1cdaa048ad-etc-swift\") pod \"swift-storage-0\" (UID: \"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad\") " pod="openstack/swift-storage-0" Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.190823 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.530240 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" event={"ID":"6ad6076b-9427-4d7c-8219-357cd2e45b4b","Type":"ContainerStarted","Data":"2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791"} Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.530567 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.549311 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podStartSLOduration=3.549293346 podStartE2EDuration="3.549293346s" podCreationTimestamp="2026-02-02 09:11:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:15.546060565 +0000 UTC m=+888.090398998" watchObservedRunningTime="2026-02-02 09:11:15.549293346 +0000 UTC m=+888.093631779" Feb 02 09:11:15 crc kubenswrapper[4747]: I0202 09:11:15.736702 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 02 09:11:16 crc kubenswrapper[4747]: I0202 09:11:16.543063 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"18b7e3ddc2c341617b2672be9fe2b16ffd486e32d36a2b5c91c8ac62d691303c"} Feb 02 09:11:16 crc kubenswrapper[4747]: I0202 09:11:16.665326 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 09:11:16 crc kubenswrapper[4747]: I0202 09:11:16.951147 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.035829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-vnmfj"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.046979 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-cbf0-account-create-update-bfwtn"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.048647 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.049251 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.052553 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.065467 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cbf0-account-create-update-bfwtn"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.108646 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vnmfj"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.211702 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-bd6f7"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.212834 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.229269 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-f497-account-create-update-79mgp"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.230403 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.233239 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpzb6\" (UniqueName: \"kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.233354 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.233480 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9xqz\" (UniqueName: \"kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.233516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.233760 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.239529 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bd6f7"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.245711 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f497-account-create-update-79mgp"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.316729 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-c8j7m"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.317819 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.319911 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.320481 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.321819 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hr2fd" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.324239 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-c8j7m"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.326022 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.343884 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344019 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9xqz\" (UniqueName: \"kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344100 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhw7q\" (UniqueName: \"kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344188 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344236 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb65d\" (UniqueName: 
\"kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpzb6\" (UniqueName: \"kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.344739 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.345324 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.370427 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9xqz\" (UniqueName: \"kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz\") pod \"cinder-db-create-vnmfj\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.372192 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpzb6\" (UniqueName: \"kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6\") pod \"barbican-cbf0-account-create-update-bfwtn\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.382488 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.412861 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.417094 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-n9vgl"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.418318 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.424984 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-n9vgl"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.445913 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhw7q\" (UniqueName: \"kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446005 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446028 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xfjd\" (UniqueName: \"kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446078 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446108 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb65d\" (UniqueName: \"kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.446131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.447909 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.447964 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.467869 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb65d\" (UniqueName: \"kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d\") pod \"barbican-db-create-bd6f7\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.477470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhw7q\" (UniqueName: \"kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q\") pod \"cinder-f497-account-create-update-79mgp\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.522770 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-cd8b-account-create-update-4jdhl"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.523692 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.525379 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.534669 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.547289 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.547407 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.548512 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cd8b-account-create-update-4jdhl"] Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.555726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.559136 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.559238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szzd4\" (UniqueName: \"kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.559282 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xfjd\" (UniqueName: \"kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.559419 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.562690 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.596268 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xfjd\" (UniqueName: \"kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd\") pod \"keystone-db-sync-c8j7m\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.633613 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.666149 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.666270 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x4vl\" (UniqueName: \"kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.666458 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.666481 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szzd4\" (UniqueName: \"kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.667159 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.699433 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szzd4\" (UniqueName: \"kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4\") pod \"neutron-db-create-n9vgl\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.769013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.769057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x4vl\" (UniqueName: \"kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.770190 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.792780 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x4vl\" (UniqueName: \"kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl\") pod \"neutron-cd8b-account-create-update-4jdhl\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.850734 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:17 crc kubenswrapper[4747]: I0202 09:11:17.889255 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.033359 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cbf0-account-create-update-bfwtn"] Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.088661 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vnmfj"] Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.179147 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bd6f7"] Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.192543 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-f497-account-create-update-79mgp"] Feb 02 09:11:18 crc kubenswrapper[4747]: W0202 09:11:18.205820 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7652f270_f57b_4e46_9171_79dcaa5975e0.slice/crio-1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c WatchSource:0}: Error finding container 1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c: Status 404 returned error can't find the container with id 1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c Feb 02 09:11:18 crc kubenswrapper[4747]: W0202 09:11:18.207190 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38596e39_b23f_4670_8927_c8cab809a25b.slice/crio-352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45 WatchSource:0}: Error finding container 352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45: Status 404 returned error can't find the container with id 352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45 Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.356213 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-c8j7m"] Feb 02 09:11:18 crc kubenswrapper[4747]: W0202 09:11:18.363682 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda92c859d_6661_4ed3_888b_267a50ed2894.slice/crio-1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333 WatchSource:0}: Error finding container 1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333: Status 404 returned error can't find the container with id 1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333 Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.489426 4747 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-n9vgl"] Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.531673 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cd8b-account-create-update-4jdhl"] Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.572825 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cbf0-account-create-update-bfwtn" event={"ID":"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb","Type":"ContainerStarted","Data":"9e763e05e46e884bed283e3000106dd02ac17c1ff377fa5da802188b4577539b"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.572889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cbf0-account-create-update-bfwtn" event={"ID":"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb","Type":"ContainerStarted","Data":"144bfc2ecd8356a1e640bcc7c43fa280608fbd4706c59edac62b069ead9e3fc4"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.579403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-c8j7m" event={"ID":"a92c859d-6661-4ed3-888b-267a50ed2894","Type":"ContainerStarted","Data":"1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.590454 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-cbf0-account-create-update-bfwtn" podStartSLOduration=1.590439044 podStartE2EDuration="1.590439044s" podCreationTimestamp="2026-02-02 09:11:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:18.587785768 +0000 UTC m=+891.132124201" watchObservedRunningTime="2026-02-02 09:11:18.590439044 +0000 UTC m=+891.134777487" Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.596419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"9d7f367a54593dcf85a78abcc2b12a28b447ea032187ca2dd99ec0fb6478de44"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.596462 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"9a84d5f5e37e45c3e83357e379c7427bbd73e9fec5dccc0ba44f8faf3cd0df88"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.596472 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"7317e54cf274fa65c652c160d6f0cdaca87b970161008c05348ff4724ab58088"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.596481 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"d8bfe47042796ece27d215176d81f523fbdce8ed20c53166ca224cb6f447be54"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.603591 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f497-account-create-update-79mgp" event={"ID":"38596e39-b23f-4670-8927-c8cab809a25b","Type":"ContainerStarted","Data":"c7b4173ab517938f6b17ec879f15c659f321b2f0b309837e7eb03081af0a2eca"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.603648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f497-account-create-update-79mgp" 
event={"ID":"38596e39-b23f-4670-8927-c8cab809a25b","Type":"ContainerStarted","Data":"352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.607123 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vnmfj" event={"ID":"722ee392-b285-4b3f-9e61-034d352069a0","Type":"ContainerStarted","Data":"e4f96e29b022f07f539adf785a45ac2cf8e6660578e30394841acdb41ca81d52"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.607172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vnmfj" event={"ID":"722ee392-b285-4b3f-9e61-034d352069a0","Type":"ContainerStarted","Data":"841ea0ec5d229d48acbbbf406142bd86e9af0d2257d1627020efb27f64f66cf9"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.609032 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n9vgl" event={"ID":"7f2ed687-bc3f-4543-b5d5-6db15856198e","Type":"ContainerStarted","Data":"db9c127e8082cecb86c5cd99870c97564156982f656e85b180a6a68ffa6e7e86"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.611144 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cd8b-account-create-update-4jdhl" event={"ID":"fe5b470c-277c-413e-9377-a5cf2bfab33e","Type":"ContainerStarted","Data":"cd5adb6432a6a8bc61c21f0e5753ed0c05f0f12a72702cb85e06a155a4d0a4df"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.611857 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bd6f7" event={"ID":"7652f270-f57b-4e46-9171-79dcaa5975e0","Type":"ContainerStarted","Data":"1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c"} Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.620526 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-f497-account-create-update-79mgp" podStartSLOduration=1.620512224 podStartE2EDuration="1.620512224s" podCreationTimestamp="2026-02-02 09:11:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:18.618280929 +0000 UTC m=+891.162619362" watchObservedRunningTime="2026-02-02 09:11:18.620512224 +0000 UTC m=+891.164850657" Feb 02 09:11:18 crc kubenswrapper[4747]: I0202 09:11:18.644074 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-vnmfj" podStartSLOduration=1.644055311 podStartE2EDuration="1.644055311s" podCreationTimestamp="2026-02-02 09:11:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:18.635777185 +0000 UTC m=+891.180115618" watchObservedRunningTime="2026-02-02 09:11:18.644055311 +0000 UTC m=+891.188393734" Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.629234 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f2ed687-bc3f-4543-b5d5-6db15856198e" containerID="53ee9304ef5e75d37af4c3a7daa7d928a76668469aac88ac95eb7762869364d6" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.629296 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n9vgl" event={"ID":"7f2ed687-bc3f-4543-b5d5-6db15856198e","Type":"ContainerDied","Data":"53ee9304ef5e75d37af4c3a7daa7d928a76668469aac88ac95eb7762869364d6"} Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.632093 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="fe5b470c-277c-413e-9377-a5cf2bfab33e" containerID="ce909cc02cf3dca12228a169d06d003e73a26c3a53be7b9383808e44cd6d21ed" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.632159 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cd8b-account-create-update-4jdhl" event={"ID":"fe5b470c-277c-413e-9377-a5cf2bfab33e","Type":"ContainerDied","Data":"ce909cc02cf3dca12228a169d06d003e73a26c3a53be7b9383808e44cd6d21ed"} Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.637433 4747 generic.go:334] "Generic (PLEG): container finished" podID="7652f270-f57b-4e46-9171-79dcaa5975e0" containerID="8a05e08a12d78c207b87be9ee1d57f882a54d380199c67f8e9987701ac95dc84" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.637492 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bd6f7" event={"ID":"7652f270-f57b-4e46-9171-79dcaa5975e0","Type":"ContainerDied","Data":"8a05e08a12d78c207b87be9ee1d57f882a54d380199c67f8e9987701ac95dc84"} Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.639452 4747 generic.go:334] "Generic (PLEG): container finished" podID="dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" containerID="9e763e05e46e884bed283e3000106dd02ac17c1ff377fa5da802188b4577539b" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.639529 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cbf0-account-create-update-bfwtn" event={"ID":"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb","Type":"ContainerDied","Data":"9e763e05e46e884bed283e3000106dd02ac17c1ff377fa5da802188b4577539b"} Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.641325 4747 generic.go:334] "Generic (PLEG): container finished" podID="38596e39-b23f-4670-8927-c8cab809a25b" containerID="c7b4173ab517938f6b17ec879f15c659f321b2f0b309837e7eb03081af0a2eca" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.641446 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f497-account-create-update-79mgp" event={"ID":"38596e39-b23f-4670-8927-c8cab809a25b","Type":"ContainerDied","Data":"c7b4173ab517938f6b17ec879f15c659f321b2f0b309837e7eb03081af0a2eca"} Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.642888 4747 generic.go:334] "Generic (PLEG): container finished" podID="722ee392-b285-4b3f-9e61-034d352069a0" containerID="e4f96e29b022f07f539adf785a45ac2cf8e6660578e30394841acdb41ca81d52" exitCode=0 Feb 02 09:11:19 crc kubenswrapper[4747]: I0202 09:11:19.643044 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vnmfj" event={"ID":"722ee392-b285-4b3f-9e61-034d352069a0","Type":"ContainerDied","Data":"e4f96e29b022f07f539adf785a45ac2cf8e6660578e30394841acdb41ca81d52"} Feb 02 09:11:20 crc kubenswrapper[4747]: I0202 09:11:20.668865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"3d745626408e5cbcac9dc5a0ac7f5770ce0ddbcd1ca1fa14690b55e74f6ae362"} Feb 02 09:11:20 crc kubenswrapper[4747]: I0202 09:11:20.669257 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"40e5415a4492f9608b285ef3b6fc0650981c96cfa80c61af6805660cbdaa63f2"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.188109 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:23 crc 
kubenswrapper[4747]: I0202 09:11:23.244628 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.245032 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-qk4l8" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="dnsmasq-dns" containerID="cri-o://4017961c9d3c9c26b4cf373b2e9a3699867f8bd938474db5a5bca63d01f3c3d3" gracePeriod=10 Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.365719 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.479695 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.484620 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts\") pod \"fe5b470c-277c-413e-9377-a5cf2bfab33e\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.484685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5x4vl\" (UniqueName: \"kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl\") pod \"fe5b470c-277c-413e-9377-a5cf2bfab33e\" (UID: \"fe5b470c-277c-413e-9377-a5cf2bfab33e\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.485767 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe5b470c-277c-413e-9377-a5cf2bfab33e" (UID: "fe5b470c-277c-413e-9377-a5cf2bfab33e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.486439 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.490929 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl" (OuterVolumeSpecName: "kube-api-access-5x4vl") pod "fe5b470c-277c-413e-9377-a5cf2bfab33e" (UID: "fe5b470c-277c-413e-9377-a5cf2bfab33e"). InnerVolumeSpecName "kube-api-access-5x4vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.500153 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.507892 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.521263 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.586758 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts\") pod \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.587521 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" (UID: "dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.587589 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts\") pod \"7f2ed687-bc3f-4543-b5d5-6db15856198e\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.587752 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpzb6\" (UniqueName: \"kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6\") pod \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\" (UID: \"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.588103 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7f2ed687-bc3f-4543-b5d5-6db15856198e" (UID: "7f2ed687-bc3f-4543-b5d5-6db15856198e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.588330 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts\") pod \"38596e39-b23f-4670-8927-c8cab809a25b\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.588357 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts\") pod \"7652f270-f57b-4e46-9171-79dcaa5975e0\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.588679 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "38596e39-b23f-4670-8927-c8cab809a25b" (UID: "38596e39-b23f-4670-8927-c8cab809a25b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.588907 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7652f270-f57b-4e46-9171-79dcaa5975e0" (UID: "7652f270-f57b-4e46-9171-79dcaa5975e0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.589010 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb65d\" (UniqueName: \"kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d\") pod \"7652f270-f57b-4e46-9171-79dcaa5975e0\" (UID: \"7652f270-f57b-4e46-9171-79dcaa5975e0\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.589048 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9xqz\" (UniqueName: \"kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz\") pod \"722ee392-b285-4b3f-9e61-034d352069a0\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.589089 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhw7q\" (UniqueName: \"kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q\") pod \"38596e39-b23f-4670-8927-c8cab809a25b\" (UID: \"38596e39-b23f-4670-8927-c8cab809a25b\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.589112 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts\") pod \"722ee392-b285-4b3f-9e61-034d352069a0\" (UID: \"722ee392-b285-4b3f-9e61-034d352069a0\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.589133 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szzd4\" (UniqueName: \"kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4\") pod \"7f2ed687-bc3f-4543-b5d5-6db15856198e\" (UID: \"7f2ed687-bc3f-4543-b5d5-6db15856198e\") " Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590090 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "722ee392-b285-4b3f-9e61-034d352069a0" (UID: "722ee392-b285-4b3f-9e61-034d352069a0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590392 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/722ee392-b285-4b3f-9e61-034d352069a0-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590408 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590417 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f2ed687-bc3f-4543-b5d5-6db15856198e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590426 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7652f270-f57b-4e46-9171-79dcaa5975e0-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590434 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38596e39-b23f-4670-8927-c8cab809a25b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590443 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe5b470c-277c-413e-9377-a5cf2bfab33e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590454 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5x4vl\" (UniqueName: \"kubernetes.io/projected/fe5b470c-277c-413e-9377-a5cf2bfab33e-kube-api-access-5x4vl\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.590888 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6" (OuterVolumeSpecName: "kube-api-access-qpzb6") pod "dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" (UID: "dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb"). InnerVolumeSpecName "kube-api-access-qpzb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.592658 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q" (OuterVolumeSpecName: "kube-api-access-jhw7q") pod "38596e39-b23f-4670-8927-c8cab809a25b" (UID: "38596e39-b23f-4670-8927-c8cab809a25b"). InnerVolumeSpecName "kube-api-access-jhw7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.593219 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz" (OuterVolumeSpecName: "kube-api-access-d9xqz") pod "722ee392-b285-4b3f-9e61-034d352069a0" (UID: "722ee392-b285-4b3f-9e61-034d352069a0"). InnerVolumeSpecName "kube-api-access-d9xqz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.593819 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d" (OuterVolumeSpecName: "kube-api-access-lb65d") pod "7652f270-f57b-4e46-9171-79dcaa5975e0" (UID: "7652f270-f57b-4e46-9171-79dcaa5975e0"). InnerVolumeSpecName "kube-api-access-lb65d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.594345 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4" (OuterVolumeSpecName: "kube-api-access-szzd4") pod "7f2ed687-bc3f-4543-b5d5-6db15856198e" (UID: "7f2ed687-bc3f-4543-b5d5-6db15856198e"). InnerVolumeSpecName "kube-api-access-szzd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.692470 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpzb6\" (UniqueName: \"kubernetes.io/projected/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb-kube-api-access-qpzb6\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.692503 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb65d\" (UniqueName: \"kubernetes.io/projected/7652f270-f57b-4e46-9171-79dcaa5975e0-kube-api-access-lb65d\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.692512 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9xqz\" (UniqueName: \"kubernetes.io/projected/722ee392-b285-4b3f-9e61-034d352069a0-kube-api-access-d9xqz\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.692521 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhw7q\" (UniqueName: \"kubernetes.io/projected/38596e39-b23f-4670-8927-c8cab809a25b-kube-api-access-jhw7q\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.692529 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szzd4\" (UniqueName: \"kubernetes.io/projected/7f2ed687-bc3f-4543-b5d5-6db15856198e-kube-api-access-szzd4\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.695093 4747 generic.go:334] "Generic (PLEG): container finished" podID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerID="4017961c9d3c9c26b4cf373b2e9a3699867f8bd938474db5a5bca63d01f3c3d3" exitCode=0 Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.695169 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qk4l8" event={"ID":"dd031753-0dc4-4ce4-be69-2d28f88f008b","Type":"ContainerDied","Data":"4017961c9d3c9c26b4cf373b2e9a3699867f8bd938474db5a5bca63d01f3c3d3"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.697639 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n9vgl" event={"ID":"7f2ed687-bc3f-4543-b5d5-6db15856198e","Type":"ContainerDied","Data":"db9c127e8082cecb86c5cd99870c97564156982f656e85b180a6a68ffa6e7e86"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.697667 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db9c127e8082cecb86c5cd99870c97564156982f656e85b180a6a68ffa6e7e86" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.697720 4747 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n9vgl" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.700446 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cd8b-account-create-update-4jdhl" event={"ID":"fe5b470c-277c-413e-9377-a5cf2bfab33e","Type":"ContainerDied","Data":"cd5adb6432a6a8bc61c21f0e5753ed0c05f0f12a72702cb85e06a155a4d0a4df"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.700484 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd5adb6432a6a8bc61c21f0e5753ed0c05f0f12a72702cb85e06a155a4d0a4df" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.700483 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cd8b-account-create-update-4jdhl" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.702677 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bd6f7" event={"ID":"7652f270-f57b-4e46-9171-79dcaa5975e0","Type":"ContainerDied","Data":"1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.702706 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bd6f7" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.702711 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1df0d964b82a997bcc5266349417d0689782eae2a42b4007e67f3668b6547c2c" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.704172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cbf0-account-create-update-bfwtn" event={"ID":"dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb","Type":"ContainerDied","Data":"144bfc2ecd8356a1e640bcc7c43fa280608fbd4706c59edac62b069ead9e3fc4"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.704196 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="144bfc2ecd8356a1e640bcc7c43fa280608fbd4706c59edac62b069ead9e3fc4" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.704234 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cbf0-account-create-update-bfwtn" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.706304 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-f497-account-create-update-79mgp" event={"ID":"38596e39-b23f-4670-8927-c8cab809a25b","Type":"ContainerDied","Data":"352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.706438 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="352d9940f08321b45bdd324e49050c853c5521e5eb435eb6dbfba743d8357b45" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.706555 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-f497-account-create-update-79mgp" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.717960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vnmfj" event={"ID":"722ee392-b285-4b3f-9e61-034d352069a0","Type":"ContainerDied","Data":"841ea0ec5d229d48acbbbf406142bd86e9af0d2257d1627020efb27f64f66cf9"} Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.717994 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="841ea0ec5d229d48acbbbf406142bd86e9af0d2257d1627020efb27f64f66cf9" Feb 02 09:11:23 crc kubenswrapper[4747]: I0202 09:11:23.718026 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vnmfj" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.028240 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.099407 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config\") pod \"dd031753-0dc4-4ce4-be69-2d28f88f008b\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.099480 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb\") pod \"dd031753-0dc4-4ce4-be69-2d28f88f008b\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.099576 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc\") pod \"dd031753-0dc4-4ce4-be69-2d28f88f008b\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.099634 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb\") pod \"dd031753-0dc4-4ce4-be69-2d28f88f008b\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.099661 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbndg\" (UniqueName: \"kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg\") pod \"dd031753-0dc4-4ce4-be69-2d28f88f008b\" (UID: \"dd031753-0dc4-4ce4-be69-2d28f88f008b\") " Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.107128 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg" (OuterVolumeSpecName: "kube-api-access-dbndg") pod "dd031753-0dc4-4ce4-be69-2d28f88f008b" (UID: "dd031753-0dc4-4ce4-be69-2d28f88f008b"). InnerVolumeSpecName "kube-api-access-dbndg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.145527 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dd031753-0dc4-4ce4-be69-2d28f88f008b" (UID: "dd031753-0dc4-4ce4-be69-2d28f88f008b"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.146239 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dd031753-0dc4-4ce4-be69-2d28f88f008b" (UID: "dd031753-0dc4-4ce4-be69-2d28f88f008b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.151574 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config" (OuterVolumeSpecName: "config") pod "dd031753-0dc4-4ce4-be69-2d28f88f008b" (UID: "dd031753-0dc4-4ce4-be69-2d28f88f008b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.159470 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dd031753-0dc4-4ce4-be69-2d28f88f008b" (UID: "dd031753-0dc4-4ce4-be69-2d28f88f008b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.202235 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.202274 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.202287 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.202300 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbndg\" (UniqueName: \"kubernetes.io/projected/dd031753-0dc4-4ce4-be69-2d28f88f008b-kube-api-access-dbndg\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.202315 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd031753-0dc4-4ce4-be69-2d28f88f008b-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.741970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"74f29a291f72a2fa6879953f65d67fc91db226c1bb5295b530fcac2c3b02c077"} Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.742040 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"a9f325b121231b5ea021b3221574bbdb51cf67488a77f94dcb6e70b42c35cb7b"} Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.746000 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qk4l8" 
event={"ID":"dd031753-0dc4-4ce4-be69-2d28f88f008b","Type":"ContainerDied","Data":"c1f5fc8be24b6e592ef435bccc95bf6864562e15f363723d4a85b250a17e4b64"} Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.746078 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qk4l8" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.746188 4747 scope.go:117] "RemoveContainer" containerID="4017961c9d3c9c26b4cf373b2e9a3699867f8bd938474db5a5bca63d01f3c3d3" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.748096 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-c8j7m" event={"ID":"a92c859d-6661-4ed3-888b-267a50ed2894","Type":"ContainerStarted","Data":"d83e892d38c647f9a5b0aa92a9384ff2abe4189ec3191e109473c1243bd446ac"} Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.772042 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-c8j7m" podStartSLOduration=3.006086375 podStartE2EDuration="7.772014025s" podCreationTimestamp="2026-02-02 09:11:17 +0000 UTC" firstStartedPulling="2026-02-02 09:11:18.377823841 +0000 UTC m=+890.922162274" lastFinishedPulling="2026-02-02 09:11:23.143751491 +0000 UTC m=+895.688089924" observedRunningTime="2026-02-02 09:11:24.766400375 +0000 UTC m=+897.310738818" watchObservedRunningTime="2026-02-02 09:11:24.772014025 +0000 UTC m=+897.316352498" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.775013 4747 scope.go:117] "RemoveContainer" containerID="be82b833501e5118114b4aa3903fd2dff8149e9740a5eba40439c63e200b2cc0" Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.798488 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:11:24 crc kubenswrapper[4747]: I0202 09:11:24.804621 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qk4l8"] Feb 02 09:11:25 crc kubenswrapper[4747]: I0202 09:11:25.765242 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"ed1f51ef23d36ba7f0b9d41dbab718b3ba74e5d528b004052ca5754c3d6f78e2"} Feb 02 09:11:25 crc kubenswrapper[4747]: I0202 09:11:25.765832 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"bd99785dde8f45bae139c47a6b97e02a526c7276a834f1a4e998985a9bb53e89"} Feb 02 09:11:25 crc kubenswrapper[4747]: I0202 09:11:25.765853 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"940d9520cfc901bbc7b61721ff060535b20ed8fe59e492a03216da75cb893dc6"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.402262 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" path="/var/lib/kubelet/pods/dd031753-0dc4-4ce4-be69-2d28f88f008b/volumes" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403261 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403557 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5b470c-277c-413e-9377-a5cf2bfab33e" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 
09:11:26.403571 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5b470c-277c-413e-9377-a5cf2bfab33e" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403583 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403591 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403601 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="722ee392-b285-4b3f-9e61-034d352069a0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403610 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="722ee392-b285-4b3f-9e61-034d352069a0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403621 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="init" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403628 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="init" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403647 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38596e39-b23f-4670-8927-c8cab809a25b" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403654 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38596e39-b23f-4670-8927-c8cab809a25b" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403662 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f2ed687-bc3f-4543-b5d5-6db15856198e" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403669 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f2ed687-bc3f-4543-b5d5-6db15856198e" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403686 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7652f270-f57b-4e46-9171-79dcaa5975e0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403694 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7652f270-f57b-4e46-9171-79dcaa5975e0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: E0202 09:11:26.403707 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="dnsmasq-dns" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403713 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="dnsmasq-dns" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403884 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403899 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd031753-0dc4-4ce4-be69-2d28f88f008b" containerName="dnsmasq-dns" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403907 4747 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="38596e39-b23f-4670-8927-c8cab809a25b" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403918 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f2ed687-bc3f-4543-b5d5-6db15856198e" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403949 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="722ee392-b285-4b3f-9e61-034d352069a0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403963 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7652f270-f57b-4e46-9171-79dcaa5975e0" containerName="mariadb-database-create" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.403979 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5b470c-277c-413e-9377-a5cf2bfab33e" containerName="mariadb-account-create-update" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.405628 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.405737 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.447172 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.447218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.447355 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vxw9\" (UniqueName: \"kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.549558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vxw9\" (UniqueName: \"kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.549656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.549684 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.550250 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.550379 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.587677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vxw9\" (UniqueName: \"kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9\") pod \"community-operators-b58ss\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.776970 4747 generic.go:334] "Generic (PLEG): container finished" podID="a92c859d-6661-4ed3-888b-267a50ed2894" containerID="d83e892d38c647f9a5b0aa92a9384ff2abe4189ec3191e109473c1243bd446ac" exitCode=0 Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.777914 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-c8j7m" event={"ID":"a92c859d-6661-4ed3-888b-267a50ed2894","Type":"ContainerDied","Data":"d83e892d38c647f9a5b0aa92a9384ff2abe4189ec3191e109473c1243bd446ac"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.785416 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"e652a157820486f6c04065d7cf98f8f6d0eafd0aac34175744f42cab68626799"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.785635 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"bf5d9b1eb50d4c58356dec97b62d7316ce31ce9950fbae5e029460670dbbf619"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.785750 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"51bb6d9108271e2dd32f1818d3379b5f82938d9c29a71262eda4d56f2e70ab08"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.785852 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30a3d44f-2ad1-4d00-824e-1e1cdaa048ad","Type":"ContainerStarted","Data":"45e0858532fd846d192ba3a230bad9eb68e427b974417279097a95215190b4b4"} Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.803526 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:26 crc kubenswrapper[4747]: I0202 09:11:26.837357 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=35.429816684 podStartE2EDuration="44.837339963s" podCreationTimestamp="2026-02-02 09:10:42 +0000 UTC" firstStartedPulling="2026-02-02 09:11:15.740766432 +0000 UTC m=+888.285104865" lastFinishedPulling="2026-02-02 09:11:25.148289681 +0000 UTC m=+897.692628144" observedRunningTime="2026-02-02 09:11:26.8324126 +0000 UTC m=+899.376751033" watchObservedRunningTime="2026-02-02 09:11:26.837339963 +0000 UTC m=+899.381678396" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.108606 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.110213 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.112887 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.120229 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160395 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160487 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160641 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kk7l\" (UniqueName: \"kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.160761 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: W0202 09:11:27.242124 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode35ebb79_3f29_4695_af28_732b49057cfa.slice/crio-374586f7eaf4c4a7099f7987d2f6810e13dfb4864d39d93ccad40a553b8618c7 WatchSource:0}: Error finding container 374586f7eaf4c4a7099f7987d2f6810e13dfb4864d39d93ccad40a553b8618c7: Status 404 returned error can't find the container with id 374586f7eaf4c4a7099f7987d2f6810e13dfb4864d39d93ccad40a553b8618c7 Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.243509 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262329 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kk7l\" (UniqueName: \"kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262403 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262452 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262533 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.262594 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.264014 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb\") pod 
\"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.264243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.264314 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.264536 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.264792 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.284056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kk7l\" (UniqueName: \"kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l\") pod \"dnsmasq-dns-7ff5475cc9-4qt96\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.427792 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.805282 4747 generic.go:334] "Generic (PLEG): container finished" podID="e35ebb79-3f29-4695-af28-732b49057cfa" containerID="80870d765b89b8d9deff47e18d2833e0d8db4d124169452a2c5683ebd7d338e4" exitCode=0 Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.805344 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerDied","Data":"80870d765b89b8d9deff47e18d2833e0d8db4d124169452a2c5683ebd7d338e4"} Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.805423 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerStarted","Data":"374586f7eaf4c4a7099f7987d2f6810e13dfb4864d39d93ccad40a553b8618c7"} Feb 02 09:11:27 crc kubenswrapper[4747]: I0202 09:11:27.934219 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.107802 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.279697 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle\") pod \"a92c859d-6661-4ed3-888b-267a50ed2894\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.280250 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xfjd\" (UniqueName: \"kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd\") pod \"a92c859d-6661-4ed3-888b-267a50ed2894\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.280473 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data\") pod \"a92c859d-6661-4ed3-888b-267a50ed2894\" (UID: \"a92c859d-6661-4ed3-888b-267a50ed2894\") " Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.286095 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd" (OuterVolumeSpecName: "kube-api-access-2xfjd") pod "a92c859d-6661-4ed3-888b-267a50ed2894" (UID: "a92c859d-6661-4ed3-888b-267a50ed2894"). InnerVolumeSpecName "kube-api-access-2xfjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.320306 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a92c859d-6661-4ed3-888b-267a50ed2894" (UID: "a92c859d-6661-4ed3-888b-267a50ed2894"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.342727 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data" (OuterVolumeSpecName: "config-data") pod "a92c859d-6661-4ed3-888b-267a50ed2894" (UID: "a92c859d-6661-4ed3-888b-267a50ed2894"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.385688 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xfjd\" (UniqueName: \"kubernetes.io/projected/a92c859d-6661-4ed3-888b-267a50ed2894-kube-api-access-2xfjd\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.385741 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.385761 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92c859d-6661-4ed3-888b-267a50ed2894-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.824721 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerStarted","Data":"0a5008083d60401a9a812da56ee73da01bdedecdd7b5533f2d4e8407c4e999fd"} Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.829204 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-c8j7m" event={"ID":"a92c859d-6661-4ed3-888b-267a50ed2894","Type":"ContainerDied","Data":"1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333"} Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.829252 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1276c63125c1bfc37342609b25384d756f97d751835f2b744e76bf5a598a2333" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.829252 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-c8j7m" Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.835179 4747 generic.go:334] "Generic (PLEG): container finished" podID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" containerID="e005f09e56dcd0fbad547acb1356ced591b36665b25cf530dfe0210ea3889140" exitCode=0 Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.835435 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" event={"ID":"8644e1e5-1e9d-4463-8ea6-d5717be8832b","Type":"ContainerDied","Data":"e005f09e56dcd0fbad547acb1356ced591b36665b25cf530dfe0210ea3889140"} Feb 02 09:11:28 crc kubenswrapper[4747]: I0202 09:11:28.835574 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" event={"ID":"8644e1e5-1e9d-4463-8ea6-d5717be8832b","Type":"ContainerStarted","Data":"847ea48e39e8f69ab55fe0a154c0e9f2d9a8042b6227a10282c91bd72afa3537"} Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.050051 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xdgq9"] Feb 02 09:11:29 crc kubenswrapper[4747]: E0202 09:11:29.050561 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a92c859d-6661-4ed3-888b-267a50ed2894" containerName="keystone-db-sync" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.050587 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a92c859d-6661-4ed3-888b-267a50ed2894" containerName="keystone-db-sync" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.050813 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a92c859d-6661-4ed3-888b-267a50ed2894" containerName="keystone-db-sync" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.051572 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.058824 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.059233 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hr2fd" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.061128 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.062289 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.062815 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.081017 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.097145 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098212 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098434 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v48n7\" (UniqueName: \"kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098603 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098690 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.098839 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-bootstrap-xdgq9"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.122411 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.130648 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.139178 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206566 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206625 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206673 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206697 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206722 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.206767 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v48n7\" (UniqueName: \"kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.214357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.216698 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 
02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.229195 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.229169 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.249201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.306078 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-2vk6l"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.307514 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308078 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308127 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308159 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ln8w\" (UniqueName: \"kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.308297 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.309310 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v48n7\" (UniqueName: \"kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7\") pod \"keystone-bootstrap-xdgq9\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.354781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.354947 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.355084 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.356127 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.357278 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wdwjv" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.388027 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2vk6l"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.399849 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.400158 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.400358 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.400494 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-2dbw7" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.410115 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416651 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxxbc\" (UniqueName: \"kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416730 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416759 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc\") pod 
\"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416787 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416815 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416868 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416909 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416946 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.416969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ln8w\" (UniqueName: \"kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.418060 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.418483 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.418786 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: 
\"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.419056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.423717 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.442392 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.501788 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ln8w\" (UniqueName: \"kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w\") pod \"dnsmasq-dns-5c5cc7c5ff-vptvj\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.521455 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.521700 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.521815 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.522188 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbgdb\" (UniqueName: \"kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.522302 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.522412 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.522506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxxbc\" (UniqueName: \"kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.522668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.528128 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.532126 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.547656 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.549753 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.557419 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.557642 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.570340 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.571625 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.587884 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxxbc\" (UniqueName: \"kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc\") pod \"neutron-db-sync-2vk6l\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.590728 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.611098 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.635523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.635596 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbgdb\" (UniqueName: \"kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.635651 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.635689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.635715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.636454 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.637458 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.642711 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.651616 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7pc6t"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.654251 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.658423 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mjpkv" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.659628 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.663759 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.687232 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7pc6t"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.695282 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.696244 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.719985 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbgdb\" (UniqueName: \"kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb\") pod \"horizon-65cfbfbb97-gsrk6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.728840 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.737625 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.737680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739324 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739371 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pth77\" (UniqueName: \"kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739448 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739464 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47l8l\" (UniqueName: \"kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739531 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739553 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc 
kubenswrapper[4747]: I0202 09:11:29.739887 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739917 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.739950 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: E0202 09:11:29.750408 4747 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Feb 02 09:11:29 crc kubenswrapper[4747]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/8644e1e5-1e9d-4463-8ea6-d5717be8832b/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 02 09:11:29 crc kubenswrapper[4747]: > podSandboxID="847ea48e39e8f69ab55fe0a154c0e9f2d9a8042b6227a10282c91bd72afa3537" Feb 02 09:11:29 crc kubenswrapper[4747]: E0202 09:11:29.750754 4747 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 02 09:11:29 crc kubenswrapper[4747]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n64h68fh95h595h67fh597hfch57ch68fh5ffh6hf4h689h659h569h65bh67bh65dh594h64h5d6hd8h5bfh9fh5c4h676h5cdh56h8bh569h664h645q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5kk7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7ff5475cc9-4qt96_openstack(8644e1e5-1e9d-4463-8ea6-d5717be8832b): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/8644e1e5-1e9d-4463-8ea6-d5717be8832b/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 02 09:11:29 crc kubenswrapper[4747]: > logger="UnhandledError" Feb 02 09:11:29 crc kubenswrapper[4747]: E0202 09:11:29.752033 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/8644e1e5-1e9d-4463-8ea6-d5717be8832b/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" 
podUID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.764098 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sh4ft"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.765152 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.773183 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-h5hpn" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.773474 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.774096 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.781493 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-kcg7d"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.783544 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.791085 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kcg7d"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.796269 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.796355 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.796382 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-xfd9p" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847061 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847128 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47l8l\" (UniqueName: \"kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847166 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx2xp\" (UniqueName: \"kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847369 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847435 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847460 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847484 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847536 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847588 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847637 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pth77\" (UniqueName: \"kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.847693 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.851077 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.851101 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.851151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.851471 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.853975 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.869719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.870420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.870557 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.870655 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 
09:11:29.874957 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sh4ft"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.876196 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.885135 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pth77\" (UniqueName: \"kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77\") pod \"ceilometer-0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.888880 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47l8l\" (UniqueName: \"kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l\") pod \"horizon-f57c69f85-bvxh8\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.891698 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.902373 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.930199 4747 generic.go:334] "Generic (PLEG): container finished" podID="e35ebb79-3f29-4695-af28-732b49057cfa" containerID="0a5008083d60401a9a812da56ee73da01bdedecdd7b5533f2d4e8407c4e999fd" exitCode=0 Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.930901 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerDied","Data":"0a5008083d60401a9a812da56ee73da01bdedecdd7b5533f2d4e8407c4e999fd"} Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.948925 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx2xp\" (UniqueName: \"kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949003 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949047 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949093 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data\") pod 
\"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949122 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949140 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949165 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949193 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949210 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949234 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949256 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949296 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv9m4\" (UniqueName: \"kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle\") pod \"barbican-db-sync-7pc6t\" (UID: 
\"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.949370 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnlbp\" (UniqueName: \"kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.955911 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.957437 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.960294 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:29 crc kubenswrapper[4747]: I0202 09:11:29.982276 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx2xp\" (UniqueName: \"kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp\") pod \"barbican-db-sync-7pc6t\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.017974 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.021188 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050510 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050590 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050607 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050642 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050675 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv9m4\" (UniqueName: \"kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050731 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnlbp\" (UniqueName: \"kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050763 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050789 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.050875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.054345 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.054419 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.054533 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.064272 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.066482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.068111 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.077627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.078052 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.078376 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.078504 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.085067 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.086851 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.087481 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv9m4\" (UniqueName: \"kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4\") pod \"cinder-db-sync-sh4ft\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.091664 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnlbp\" (UniqueName: \"kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp\") pod \"placement-db-sync-kcg7d\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.092412 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.092594 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.092729 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.092898 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2v6s8" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.101613 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.126678 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156481 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156548 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156673 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156701 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdn7h\" (UniqueName: \"kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.156808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.169407 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-kcg7d" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.258571 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.258947 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.258985 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259067 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259084 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259104 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " 
pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259185 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdn7h\" (UniqueName: \"kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259217 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259256 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259276 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxrhl\" (UniqueName: \"kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.259321 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.261327 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.261334 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.261467 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.261910 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " 
pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.264158 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.277965 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdn7h\" (UniqueName: \"kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h\") pod \"dnsmasq-dns-8b5c85b87-ft6lc\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.364738 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365265 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365305 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365358 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxrhl\" (UniqueName: \"kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 
09:11:30.365431 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.365909 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.368507 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.368523 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.374495 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.385289 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.390820 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.406446 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxrhl\" (UniqueName: \"kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.408911 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.421300 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.454850 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.456119 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.529039 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.571790 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.572071 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.574155 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.574409 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.577253 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xdgq9"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685368 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685736 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685778 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685820 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685890 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.685963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.686028 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.686050 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g97p4\" (UniqueName: \"kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.735228 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787177 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787248 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787291 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g97p4\" (UniqueName: \"kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787307 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787389 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.787403 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.789434 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.789559 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.789818 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.794167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.795687 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.796203 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 
09:11:30.796403 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.824286 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g97p4\" (UniqueName: \"kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.841359 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890070 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890127 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890154 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890282 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kk7l\" (UniqueName: \"kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.890308 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb\") pod \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\" (UID: \"8644e1e5-1e9d-4463-8ea6-d5717be8832b\") " Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.897572 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l" (OuterVolumeSpecName: "kube-api-access-5kk7l") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). 
InnerVolumeSpecName "kube-api-access-5kk7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.903658 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.945589 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.945645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-4qt96" event={"ID":"8644e1e5-1e9d-4463-8ea6-d5717be8832b","Type":"ContainerDied","Data":"847ea48e39e8f69ab55fe0a154c0e9f2d9a8042b6227a10282c91bd72afa3537"} Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.945723 4747 scope.go:117] "RemoveContainer" containerID="e005f09e56dcd0fbad547acb1356ced591b36665b25cf530dfe0210ea3889140" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.945888 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.948495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65cfbfbb97-gsrk6" event={"ID":"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6","Type":"ContainerStarted","Data":"706c1975454c3ed8eff3c6bc936e1a8f14ee367a3805d1e45bca7d16f22b2e45"} Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.951782 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.952632 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config" (OuterVolumeSpecName: "config") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.958537 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerStarted","Data":"d171b71c5c2623728ea715cc710d15de8b12fd8d99f9560093f8dc5d07435149"} Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.960975 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.962227 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xdgq9" event={"ID":"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825","Type":"ContainerStarted","Data":"2e6d43ec000cb195198b0d8a511ea959a31f954fd296ff8f6cbfc2eb5fec4755"} Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.962259 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xdgq9" event={"ID":"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825","Type":"ContainerStarted","Data":"e10b7550f0848dd05fc3a2e7ebb2dd8f87c6c854afb13e5c361fdbc2e6552d56"} Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.965679 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8644e1e5-1e9d-4463-8ea6-d5717be8832b" (UID: "8644e1e5-1e9d-4463-8ea6-d5717be8832b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:30 crc kubenswrapper[4747]: I0202 09:11:30.991237 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b58ss" podStartSLOduration=2.096280165 podStartE2EDuration="4.991218686s" podCreationTimestamp="2026-02-02 09:11:26 +0000 UTC" firstStartedPulling="2026-02-02 09:11:27.809279466 +0000 UTC m=+900.353617909" lastFinishedPulling="2026-02-02 09:11:30.704217997 +0000 UTC m=+903.248556430" observedRunningTime="2026-02-02 09:11:30.981250058 +0000 UTC m=+903.525588491" watchObservedRunningTime="2026-02-02 09:11:30.991218686 +0000 UTC m=+903.535557119" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000048 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000091 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kk7l\" (UniqueName: \"kubernetes.io/projected/8644e1e5-1e9d-4463-8ea6-d5717be8832b-kube-api-access-5kk7l\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000106 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000119 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000132 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.000144 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8644e1e5-1e9d-4463-8ea6-d5717be8832b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.029578 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xdgq9" 
podStartSLOduration=2.029555572 podStartE2EDuration="2.029555572s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:31.017848839 +0000 UTC m=+903.562187272" watchObservedRunningTime="2026-02-02 09:11:31.029555572 +0000 UTC m=+903.573894005" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.108526 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7pc6t"] Feb 02 09:11:31 crc kubenswrapper[4747]: W0202 09:11:31.147960 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1079c308_67ae_4be0_903d_49b2a0f0aa59.slice/crio-3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c WatchSource:0}: Error finding container 3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c: Status 404 returned error can't find the container with id 3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.154142 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2vk6l"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.187423 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.207412 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.215010 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.229779 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.241150 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:11:31 crc kubenswrapper[4747]: E0202 09:11:31.241495 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" containerName="init" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.241507 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" containerName="init" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.241690 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" containerName="init" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.242829 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.256043 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.294434 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.304001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8784z\" (UniqueName: \"kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.304065 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.304102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.304130 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.304185 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.335400 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sh4ft"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.367598 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.380990 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kcg7d"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.405433 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.405531 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " 
pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.405570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.405653 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.405736 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8784z\" (UniqueName: \"kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.409144 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.409380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.409797 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.422733 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.436833 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8784z\" (UniqueName: \"kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z\") pod \"horizon-7dcdbf6f-bvzkf\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.483062 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.497709 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-4qt96"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.515090 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.531954 4747 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.607653 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:11:31 crc kubenswrapper[4747]: W0202 09:11:31.610205 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e0f1b52_714c_49f2_a734_30de1ef5c0d4.slice/crio-7665c3f760e39783bb27ce808ee1cecdfdfa95e535de4bb658da4a4f75501cf5 WatchSource:0}: Error finding container 7665c3f760e39783bb27ce808ee1cecdfdfa95e535de4bb658da4a4f75501cf5: Status 404 returned error can't find the container with id 7665c3f760e39783bb27ce808ee1cecdfdfa95e535de4bb658da4a4f75501cf5 Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.736101 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:31 crc kubenswrapper[4747]: I0202 09:11:31.997645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kcg7d" event={"ID":"2551c83c-96c3-43d9-916d-04bf8bbaf85a","Type":"ContainerStarted","Data":"03b786ca48e3acbefc0ee5b1535e102bfa1ce0f2358be2136bb5f3779423d091"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.007257 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vk6l" event={"ID":"1079c308-67ae-4be0-903d-49b2a0f0aa59","Type":"ContainerStarted","Data":"515c6e3d007f2d8d3b02472ebcc2f8d945b7fc435544430a8ea193ec7b7a90be"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.007337 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vk6l" event={"ID":"1079c308-67ae-4be0-903d-49b2a0f0aa59","Type":"ContainerStarted","Data":"3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.017725 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sh4ft" event={"ID":"11768358-bd3c-440c-ac71-1c1ad4436571","Type":"ContainerStarted","Data":"61a75b27ae044602808956d421dc01e3d1c7bc4707f98b8b04d904617d4db055"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.021497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7pc6t" event={"ID":"b3486730-26af-4fe1-a379-b28af74eb1e8","Type":"ContainerStarted","Data":"9674817cc03b0ae7edce2dd028da029611818cd616e3e044d864f93235a7e17e"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.034736 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b9c987-e452-44a7-a538-eaa77c661ea3" containerID="4fc0a95beaee533487b3bcba363bc816ac72f971df8bffde3a7d840c37d95e50" exitCode=0 Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.034788 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" event={"ID":"f4b9c987-e452-44a7-a538-eaa77c661ea3","Type":"ContainerDied","Data":"4fc0a95beaee533487b3bcba363bc816ac72f971df8bffde3a7d840c37d95e50"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.034812 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" event={"ID":"f4b9c987-e452-44a7-a538-eaa77c661ea3","Type":"ContainerStarted","Data":"b9229aad53cf2d236b5ecfe72afe8210e78470aa48feca80c288f343e1a90405"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.037717 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerStarted","Data":"7665c3f760e39783bb27ce808ee1cecdfdfa95e535de4bb658da4a4f75501cf5"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.041124 4747 generic.go:334] "Generic (PLEG): container finished" podID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerID="2f5dec28f6909a79ae567f2a054c882000337229cd0e10c6b805722e9f2fca2a" exitCode=0 Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.042071 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" event={"ID":"3ecf0f91-10b8-40bf-8b3f-55917d1002d2","Type":"ContainerDied","Data":"2f5dec28f6909a79ae567f2a054c882000337229cd0e10c6b805722e9f2fca2a"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.042197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" event={"ID":"3ecf0f91-10b8-40bf-8b3f-55917d1002d2","Type":"ContainerStarted","Data":"a909b211227c0631d2b8c5906b1232d331d601054983be16ecf21b8c12ce42b6"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.048429 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-2vk6l" podStartSLOduration=3.048408526 podStartE2EDuration="3.048408526s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:32.026073419 +0000 UTC m=+904.570411852" watchObservedRunningTime="2026-02-02 09:11:32.048408526 +0000 UTC m=+904.592746959" Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.049833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f57c69f85-bvxh8" event={"ID":"98633938-4be5-489e-8e19-b23502ef4a40","Type":"ContainerStarted","Data":"a55ddc1d2880ad03dbe13ed9428263121bb0b00ca7160aa1669ee262ae3e105d"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.089609 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerStarted","Data":"c6bd4d79cc78ed2fc1542f968612c721f5dacb33c3e4faa711d500bc092ec38d"} Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.355154 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8644e1e5-1e9d-4463-8ea6-d5717be8832b" path="/var/lib/kubelet/pods/8644e1e5-1e9d-4463-8ea6-d5717be8832b/volumes" Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.382828 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:11:32 crc kubenswrapper[4747]: I0202 09:11:32.490911 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.709505 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ln8w\" (UniqueName: \"kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753622 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753654 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753762 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.753821 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb\") pod \"f4b9c987-e452-44a7-a538-eaa77c661ea3\" (UID: \"f4b9c987-e452-44a7-a538-eaa77c661ea3\") " Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.769120 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w" (OuterVolumeSpecName: "kube-api-access-6ln8w") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "kube-api-access-6ln8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.800490 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.818673 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.827339 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.832024 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config" (OuterVolumeSpecName: "config") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.846538 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f4b9c987-e452-44a7-a538-eaa77c661ea3" (UID: "f4b9c987-e452-44a7-a538-eaa77c661ea3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856536 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856562 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856572 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856581 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856590 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4b9c987-e452-44a7-a538-eaa77c661ea3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:32.856599 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ln8w\" (UniqueName: \"kubernetes.io/projected/f4b9c987-e452-44a7-a538-eaa77c661ea3-kube-api-access-6ln8w\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.228842 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" event={"ID":"f4b9c987-e452-44a7-a538-eaa77c661ea3","Type":"ContainerDied","Data":"b9229aad53cf2d236b5ecfe72afe8210e78470aa48feca80c288f343e1a90405"} Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.229356 4747 scope.go:117] "RemoveContainer" containerID="4fc0a95beaee533487b3bcba363bc816ac72f971df8bffde3a7d840c37d95e50" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.229174 4747 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-vptvj" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.274391 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerStarted","Data":"0b6e02fe0a534308f5085acf3ad53798a1102dc1b283307aff7170116c198c39"} Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.276421 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dcdbf6f-bvzkf" event={"ID":"12c81b21-3f11-4c58-b1ee-a23cf95e4db1","Type":"ContainerStarted","Data":"72c6affa8e5dd89821d5c454af0fdfafbaef0bdfb023b02dfaa12a71156f03f7"} Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.277536 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerStarted","Data":"43fc6afb926cc8b4ec3fd590250468e0162dff9676e79fd237673f1e1a6941ef"} Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.281875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" event={"ID":"3ecf0f91-10b8-40bf-8b3f-55917d1002d2","Type":"ContainerStarted","Data":"32ad24a5e0e49c42a7bdec6cefb4a9d070d02099eec449fe668dd627d86be33c"} Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.281904 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.332114 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.349330 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-vptvj"] Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:33.373354 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" podStartSLOduration=4.373335485 podStartE2EDuration="4.373335485s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:33.371438378 +0000 UTC m=+905.915776811" watchObservedRunningTime="2026-02-02 09:11:33.373335485 +0000 UTC m=+905.917673908" Feb 02 09:11:35 crc kubenswrapper[4747]: I0202 09:11:34.380199 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b9c987-e452-44a7-a538-eaa77c661ea3" path="/var/lib/kubelet/pods/f4b9c987-e452-44a7-a538-eaa77c661ea3/volumes" Feb 02 09:11:36 crc kubenswrapper[4747]: I0202 09:11:36.803834 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:36 crc kubenswrapper[4747]: I0202 09:11:36.804158 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.329883 4747 generic.go:334] "Generic (PLEG): container finished" podID="bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" containerID="2e6d43ec000cb195198b0d8a511ea959a31f954fd296ff8f6cbfc2eb5fec4755" exitCode=0 Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.329985 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xdgq9" 
event={"ID":"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825","Type":"ContainerDied","Data":"2e6d43ec000cb195198b0d8a511ea959a31f954fd296ff8f6cbfc2eb5fec4755"} Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.332165 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerStarted","Data":"f4ed110df3c22f3e6c05f3c6df1845789ec3fc836f6df0cba00de97f60c379fe"} Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.336240 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerStarted","Data":"342f058c107e925c8426427eaaf05936a77b6725a60b4c27acfb67384778920b"} Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.336504 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-log" containerID="cri-o://0b6e02fe0a534308f5085acf3ad53798a1102dc1b283307aff7170116c198c39" gracePeriod=30 Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.336788 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-httpd" containerID="cri-o://342f058c107e925c8426427eaaf05936a77b6725a60b4c27acfb67384778920b" gracePeriod=30 Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.369588 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.369570106 podStartE2EDuration="8.369570106s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:37.363184057 +0000 UTC m=+909.907522500" watchObservedRunningTime="2026-02-02 09:11:37.369570106 +0000 UTC m=+909.913908539" Feb 02 09:11:37 crc kubenswrapper[4747]: I0202 09:11:37.862062 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-b58ss" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" probeResult="failure" output=< Feb 02 09:11:37 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:11:37 crc kubenswrapper[4747]: > Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.146161 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.183027 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:11:38 crc kubenswrapper[4747]: E0202 09:11:38.183395 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b9c987-e452-44a7-a538-eaa77c661ea3" containerName="init" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.183411 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b9c987-e452-44a7-a538-eaa77c661ea3" containerName="init" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.183572 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b9c987-e452-44a7-a538-eaa77c661ea3" containerName="init" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.184352 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.205166 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.212439 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.265890 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.265954 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5prbt\" (UniqueName: \"kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.265996 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.266016 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.266050 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.266070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.266091 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " 
pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368612 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5prbt\" (UniqueName: \"kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368658 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368685 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368723 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368743 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.368770 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.373369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.374627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.380689 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.399895 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.400047 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.400463 4747 generic.go:334] "Generic (PLEG): container finished" podID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerID="342f058c107e925c8426427eaaf05936a77b6725a60b4c27acfb67384778920b" exitCode=0 Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.400483 4747 generic.go:334] "Generic (PLEG): container finished" podID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerID="0b6e02fe0a534308f5085acf3ad53798a1102dc1b283307aff7170116c198c39" exitCode=143 Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.411333 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5prbt\" (UniqueName: \"kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431053 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerDied","Data":"342f058c107e925c8426427eaaf05936a77b6725a60b4c27acfb67384778920b"} Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431102 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431129 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerDied","Data":"0b6e02fe0a534308f5085acf3ad53798a1102dc1b283307aff7170116c198c39"} Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431508 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-log" containerID="cri-o://f4ed110df3c22f3e6c05f3c6df1845789ec3fc836f6df0cba00de97f60c379fe" gracePeriod=30 Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431681 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerStarted","Data":"3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5"} Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.431743 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-httpd" containerID="cri-o://3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5" gracePeriod=30 Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.437089 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs\") pod \"horizon-69bf4987b8-zq2rd\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.494612 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7495bf65bd-857k2"] Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.501822 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.525685 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.526276 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7495bf65bd-857k2"] Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.577903 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-secret-key\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.577965 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2860dde6-602b-417e-9819-6ce526ed2eb9-logs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.578009 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-scripts\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.578028 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-tls-certs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.578076 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-config-data\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.578108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-955mx\" (UniqueName: \"kubernetes.io/projected/2860dde6-602b-417e-9819-6ce526ed2eb9-kube-api-access-955mx\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.578135 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-combined-ca-bundle\") pod 
\"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679189 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-secret-key\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679232 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2860dde6-602b-417e-9819-6ce526ed2eb9-logs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-scripts\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-tls-certs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679327 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-config-data\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679358 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-955mx\" (UniqueName: \"kubernetes.io/projected/2860dde6-602b-417e-9819-6ce526ed2eb9-kube-api-access-955mx\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.679386 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-combined-ca-bundle\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.680358 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-scripts\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.683435 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2860dde6-602b-417e-9819-6ce526ed2eb9-logs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.684608 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-secret-key\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.685592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2860dde6-602b-417e-9819-6ce526ed2eb9-config-data\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.686738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-combined-ca-bundle\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.690665 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2860dde6-602b-417e-9819-6ce526ed2eb9-horizon-tls-certs\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.703594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-955mx\" (UniqueName: \"kubernetes.io/projected/2860dde6-602b-417e-9819-6ce526ed2eb9-kube-api-access-955mx\") pod \"horizon-7495bf65bd-857k2\" (UID: \"2860dde6-602b-417e-9819-6ce526ed2eb9\") " pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.751005 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.750985464 podStartE2EDuration="9.750985464s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:11:38.744500012 +0000 UTC m=+911.288838455" watchObservedRunningTime="2026-02-02 09:11:38.750985464 +0000 UTC m=+911.295323897" Feb 02 09:11:38 crc kubenswrapper[4747]: I0202 09:11:38.874611 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:11:39 crc kubenswrapper[4747]: I0202 09:11:39.448659 4747 generic.go:334] "Generic (PLEG): container finished" podID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerID="3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5" exitCode=0 Feb 02 09:11:39 crc kubenswrapper[4747]: I0202 09:11:39.448690 4747 generic.go:334] "Generic (PLEG): container finished" podID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerID="f4ed110df3c22f3e6c05f3c6df1845789ec3fc836f6df0cba00de97f60c379fe" exitCode=143 Feb 02 09:11:39 crc kubenswrapper[4747]: I0202 09:11:39.448705 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerDied","Data":"3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5"} Feb 02 09:11:39 crc kubenswrapper[4747]: I0202 09:11:39.448821 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerDied","Data":"f4ed110df3c22f3e6c05f3c6df1845789ec3fc836f6df0cba00de97f60c379fe"} Feb 02 09:11:40 crc kubenswrapper[4747]: I0202 09:11:40.457627 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:11:40 crc kubenswrapper[4747]: I0202 09:11:40.526811 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:11:40 crc kubenswrapper[4747]: I0202 09:11:40.527043 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" containerID="cri-o://2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791" gracePeriod=10 Feb 02 09:11:40 crc kubenswrapper[4747]: E0202 09:11:40.805284 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ad6076b_9427_4d7c_8219_357cd2e45b4b.slice/crio-conmon-2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791.scope\": RecentStats: unable to find data in memory cache]" Feb 02 09:11:41 crc kubenswrapper[4747]: I0202 09:11:41.473816 4747 generic.go:334] "Generic (PLEG): container finished" podID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerID="2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791" exitCode=0 Feb 02 09:11:41 crc kubenswrapper[4747]: I0202 09:11:41.473872 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" event={"ID":"6ad6076b-9427-4d7c-8219-357cd2e45b4b","Type":"ContainerDied","Data":"2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791"} Feb 02 09:11:43 crc kubenswrapper[4747]: I0202 09:11:43.187559 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 09:11:47.175991 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 
09:11:47.176419 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n88h68h77hb7hf5h685h596h5b6hc6h5c8h5d6h5b9h587h5f5hc7h7dh5f6h568h546h549hc7h69h87hb8h564hf9h59dh66dh684h5b4h66h99q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8784z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7dcdbf6f-bvzkf_openstack(12c81b21-3f11-4c58-b1ee-a23cf95e4db1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 09:11:47.183971 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7dcdbf6f-bvzkf" podUID="12c81b21-3f11-4c58-b1ee-a23cf95e4db1" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 09:11:47.207780 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 09:11:47.208030 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nc8h5c6h67fhdfh67fh656h588h58bh58bhcbh57h6dh57h554h55fh5c7h647h66dh5b9h644h6bh547h688h679h79h74hb6h5c6h77hb7h77h75q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-47l8l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-f57c69f85-bvxh8_openstack(98633938-4be5-489e-8e19-b23502ef4a40): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:11:47 crc kubenswrapper[4747]: E0202 09:11:47.210399 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-f57c69f85-bvxh8" podUID="98633938-4be5-489e-8e19-b23502ef4a40" Feb 02 09:11:47 crc kubenswrapper[4747]: I0202 09:11:47.883389 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-b58ss" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" probeResult="failure" output=< Feb 02 09:11:47 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:11:47 crc kubenswrapper[4747]: > Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.331713 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.333959 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.380353 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.390477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kwtb\" (UniqueName: \"kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.390714 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.392199 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.494716 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.494888 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.495027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kwtb\" (UniqueName: \"kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.495619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.495697 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.513714 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9kwtb\" (UniqueName: \"kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb\") pod \"redhat-marketplace-ccgcp\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:48 crc kubenswrapper[4747]: I0202 09:11:48.662732 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:11:49 crc kubenswrapper[4747]: E0202 09:11:49.430813 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Feb 02 09:11:49 crc kubenswrapper[4747]: E0202 09:11:49.431337 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vnlbp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-kcg7d_openstack(2551c83c-96c3-43d9-916d-04bf8bbaf85a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:11:49 crc kubenswrapper[4747]: E0202 09:11:49.432534 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-kcg7d" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" Feb 02 09:11:49 crc 
kubenswrapper[4747]: I0202 09:11:49.505129 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.551364 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xdgq9" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.551533 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xdgq9" event={"ID":"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825","Type":"ContainerDied","Data":"e10b7550f0848dd05fc3a2e7ebb2dd8f87c6c854afb13e5c361fdbc2e6552d56"} Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.551557 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e10b7550f0848dd05fc3a2e7ebb2dd8f87c6c854afb13e5c361fdbc2e6552d56" Feb 02 09:11:49 crc kubenswrapper[4747]: E0202 09:11:49.555250 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-kcg7d" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619052 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619383 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v48n7\" (UniqueName: \"kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619408 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619635 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619743 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.619865 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys\") pod \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\" (UID: \"bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825\") " Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.629076 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.631686 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts" (OuterVolumeSpecName: "scripts") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.638775 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7" (OuterVolumeSpecName: "kube-api-access-v48n7") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "kube-api-access-v48n7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.666245 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.667679 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.722747 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v48n7\" (UniqueName: \"kubernetes.io/projected/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-kube-api-access-v48n7\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.722780 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.722790 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.722798 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.722808 4747 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.730530 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data" (OuterVolumeSpecName: "config-data") pod "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" (UID: "bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:11:49 crc kubenswrapper[4747]: I0202 09:11:49.824529 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.518634 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.519731 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.603632 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xdgq9"] Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.611461 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xdgq9"] Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.702048 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tmgr2"] Feb 02 09:11:50 crc kubenswrapper[4747]: E0202 09:11:50.702453 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" containerName="keystone-bootstrap" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 
09:11:50.702474 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" containerName="keystone-bootstrap" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.702659 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" containerName="keystone-bootstrap" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.703486 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.706179 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.706362 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.706487 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.706642 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.706790 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hr2fd" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.709805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tmgr2"] Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.840686 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrk98\" (UniqueName: \"kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.840749 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.840908 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.840974 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.841117 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 
09:11:50.841254 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942502 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrk98\" (UniqueName: \"kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942584 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942629 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942708 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.942735 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.949144 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.949470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.949899 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.950009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.962436 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:50 crc kubenswrapper[4747]: I0202 09:11:50.966135 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrk98\" (UniqueName: \"kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98\") pod \"keystone-bootstrap-tmgr2\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:51 crc kubenswrapper[4747]: I0202 09:11:51.032987 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:11:52 crc kubenswrapper[4747]: E0202 09:11:52.183217 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 02 09:11:52 crc kubenswrapper[4747]: E0202 09:11:52.183407 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66ch5ffh646h565h5d4h589hb6h5h65bh5bh5ch544h5cbh5b5h5d5hffh5f8h586h547hffh7bh6bh64bh54chfdhc7hfch5bfh77h69h67dh595q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jbgdb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-65cfbfbb97-gsrk6_openstack(bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:11:52 crc kubenswrapper[4747]: E0202 09:11:52.186112 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-65cfbfbb97-gsrk6" podUID="bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" Feb 02 09:11:52 crc kubenswrapper[4747]: I0202 09:11:52.349966 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825" path="/var/lib/kubelet/pods/bf06a5f3-8f9b-4e7e-b6f0-a9c1a19a2825/volumes" Feb 02 09:11:53 crc kubenswrapper[4747]: I0202 09:11:53.187504 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Feb 02 09:11:53 crc kubenswrapper[4747]: I0202 09:11:53.585200 4747 generic.go:334] "Generic (PLEG): container finished" podID="1079c308-67ae-4be0-903d-49b2a0f0aa59" containerID="515c6e3d007f2d8d3b02472ebcc2f8d945b7fc435544430a8ea193ec7b7a90be" exitCode=0 Feb 02 09:11:53 crc kubenswrapper[4747]: I0202 09:11:53.585252 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vk6l" 
event={"ID":"1079c308-67ae-4be0-903d-49b2a0f0aa59","Type":"ContainerDied","Data":"515c6e3d007f2d8d3b02472ebcc2f8d945b7fc435544430a8ea193ec7b7a90be"} Feb 02 09:11:56 crc kubenswrapper[4747]: I0202 09:11:56.861180 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:56 crc kubenswrapper[4747]: I0202 09:11:56.913519 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:11:57 crc kubenswrapper[4747]: I0202 09:11:57.100836 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:11:58 crc kubenswrapper[4747]: I0202 09:11:58.187798 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Feb 02 09:11:58 crc kubenswrapper[4747]: I0202 09:11:58.188662 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:58 crc kubenswrapper[4747]: I0202 09:11:58.631478 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b58ss" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" containerID="cri-o://d171b71c5c2623728ea715cc710d15de8b12fd8d99f9560093f8dc5d07435149" gracePeriod=2 Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.649311 4747 generic.go:334] "Generic (PLEG): container finished" podID="e35ebb79-3f29-4695-af28-732b49057cfa" containerID="d171b71c5c2623728ea715cc710d15de8b12fd8d99f9560093f8dc5d07435149" exitCode=0 Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.649363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerDied","Data":"d171b71c5c2623728ea715cc710d15de8b12fd8d99f9560093f8dc5d07435149"} Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.911927 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.919600 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.928926 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.963845 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:11:59 crc kubenswrapper[4747]: I0202 09:11:59.965921 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046631 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046694 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046732 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs\") pod \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046763 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046826 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8784z\" (UniqueName: \"kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z\") pod \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046862 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs\") pod \"98633938-4be5-489e-8e19-b23502ef4a40\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.046914 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047013 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb\") pod \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047049 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts\") pod \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047118 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key\") pod \"98633938-4be5-489e-8e19-b23502ef4a40\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " Feb 02 09:12:00 crc 
kubenswrapper[4747]: I0202 09:12:00.047152 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047201 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxrhl\" (UniqueName: \"kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047252 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047286 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc\") pod \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047324 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047358 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5k9r\" (UniqueName: \"kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r\") pod \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047457 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047500 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047532 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data\") pod \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047587 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts\") pod \"98633938-4be5-489e-8e19-b23502ef4a40\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047635 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config\") pod \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047677 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47l8l\" (UniqueName: \"kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l\") pod \"98633938-4be5-489e-8e19-b23502ef4a40\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data\") pod \"98633938-4be5-489e-8e19-b23502ef4a40\" (UID: \"98633938-4be5-489e-8e19-b23502ef4a40\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047766 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb\") pod \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\" (UID: \"6ad6076b-9427-4d7c-8219-357cd2e45b4b\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run\") pod \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\" (UID: \"8bbcc3f0-dc53-4b02-a05c-d57a54874477\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047840 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g97p4\" (UniqueName: \"kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047904 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047968 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.048006 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key\") pod \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\" (UID: \"12c81b21-3f11-4c58-b1ee-a23cf95e4db1\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.048041 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run\") pod \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\" (UID: \"0e0f1b52-714c-49f2-a734-30de1ef5c0d4\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047194 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs" (OuterVolumeSpecName: "logs") pod "12c81b21-3f11-4c58-b1ee-a23cf95e4db1" (UID: "12c81b21-3f11-4c58-b1ee-a23cf95e4db1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047361 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs" (OuterVolumeSpecName: "logs") pod "98633938-4be5-489e-8e19-b23502ef4a40" (UID: "98633938-4be5-489e-8e19-b23502ef4a40"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.047552 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs" (OuterVolumeSpecName: "logs") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.048288 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts" (OuterVolumeSpecName: "scripts") pod "12c81b21-3f11-4c58-b1ee-a23cf95e4db1" (UID: "12c81b21-3f11-4c58-b1ee-a23cf95e4db1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.048894 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.051036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z" (OuterVolumeSpecName: "kube-api-access-8784z") pod "12c81b21-3f11-4c58-b1ee-a23cf95e4db1" (UID: "12c81b21-3f11-4c58-b1ee-a23cf95e4db1"). InnerVolumeSpecName "kube-api-access-8784z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.051530 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "98633938-4be5-489e-8e19-b23502ef4a40" (UID: "98633938-4be5-489e-8e19-b23502ef4a40"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.052724 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data" (OuterVolumeSpecName: "config-data") pod "12c81b21-3f11-4c58-b1ee-a23cf95e4db1" (UID: "12c81b21-3f11-4c58-b1ee-a23cf95e4db1"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.053416 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs" (OuterVolumeSpecName: "logs") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.054038 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts" (OuterVolumeSpecName: "scripts") pod "98633938-4be5-489e-8e19-b23502ef4a40" (UID: "98633938-4be5-489e-8e19-b23502ef4a40"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.054814 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts" (OuterVolumeSpecName: "scripts") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.056111 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.059397 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l" (OuterVolumeSpecName: "kube-api-access-47l8l") pod "98633938-4be5-489e-8e19-b23502ef4a40" (UID: "98633938-4be5-489e-8e19-b23502ef4a40"). InnerVolumeSpecName "kube-api-access-47l8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.060099 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data" (OuterVolumeSpecName: "config-data") pod "98633938-4be5-489e-8e19-b23502ef4a40" (UID: "98633938-4be5-489e-8e19-b23502ef4a40"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.064126 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts" (OuterVolumeSpecName: "scripts") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.064667 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r" (OuterVolumeSpecName: "kube-api-access-w5k9r") pod "6ad6076b-9427-4d7c-8219-357cd2e45b4b" (UID: "6ad6076b-9427-4d7c-8219-357cd2e45b4b"). InnerVolumeSpecName "kube-api-access-w5k9r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.068211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl" (OuterVolumeSpecName: "kube-api-access-jxrhl") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "kube-api-access-jxrhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.068627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.070681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.071200 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4" (OuterVolumeSpecName: "kube-api-access-g97p4") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "kube-api-access-g97p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.077814 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "12c81b21-3f11-4c58-b1ee-a23cf95e4db1" (UID: "12c81b21-3f11-4c58-b1ee-a23cf95e4db1"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.129126 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6ad6076b-9427-4d7c-8219-357cd2e45b4b" (UID: "6ad6076b-9427-4d7c-8219-357cd2e45b4b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.130269 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6ad6076b-9427-4d7c-8219-357cd2e45b4b" (UID: "6ad6076b-9427-4d7c-8219-357cd2e45b4b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.131587 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.131828 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.139750 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config" (OuterVolumeSpecName: "config") pod "6ad6076b-9427-4d7c-8219-357cd2e45b4b" (UID: "6ad6076b-9427-4d7c-8219-357cd2e45b4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.148203 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data" (OuterVolumeSpecName: "config-data") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.149880 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150175 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150219 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150235 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8784z\" (UniqueName: \"kubernetes.io/projected/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-kube-api-access-8784z\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150250 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98633938-4be5-489e-8e19-b23502ef4a40-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150261 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150273 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150285 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150297 4747 reconciler_common.go:293] "Volume detached for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/98633938-4be5-489e-8e19-b23502ef4a40-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150311 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150323 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxrhl\" (UniqueName: \"kubernetes.io/projected/8bbcc3f0-dc53-4b02-a05c-d57a54874477-kube-api-access-jxrhl\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150336 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150347 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150360 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150371 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5k9r\" (UniqueName: \"kubernetes.io/projected/6ad6076b-9427-4d7c-8219-357cd2e45b4b-kube-api-access-w5k9r\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150381 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150391 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150401 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150413 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150424 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150435 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47l8l\" (UniqueName: \"kubernetes.io/projected/98633938-4be5-489e-8e19-b23502ef4a40-kube-api-access-47l8l\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150445 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98633938-4be5-489e-8e19-b23502ef4a40-config-data\") on node \"crc\" DevicePath 
\"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150455 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8bbcc3f0-dc53-4b02-a05c-d57a54874477-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150465 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g97p4\" (UniqueName: \"kubernetes.io/projected/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-kube-api-access-g97p4\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150498 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150511 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/12c81b21-3f11-4c58-b1ee-a23cf95e4db1-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.150523 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.153125 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data" (OuterVolumeSpecName: "config-data") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.153717 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8bbcc3f0-dc53-4b02-a05c-d57a54874477" (UID: "8bbcc3f0-dc53-4b02-a05c-d57a54874477"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.161834 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6ad6076b-9427-4d7c-8219-357cd2e45b4b" (UID: "6ad6076b-9427-4d7c-8219-357cd2e45b4b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.165360 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0e0f1b52-714c-49f2-a734-30de1ef5c0d4" (UID: "0e0f1b52-714c-49f2-a734-30de1ef5c0d4"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.169072 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.176644 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251742 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ad6076b-9427-4d7c-8219-357cd2e45b4b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251771 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251783 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251791 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e0f1b52-714c-49f2-a734-30de1ef5c0d4-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251800 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.251809 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bbcc3f0-dc53-4b02-a05c-d57a54874477-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.343481 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.348662 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.351429 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.351567 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n9fh67dh676h57fh5cbh5dbh689hc6h96h598h5dfh58bh59dh5b4h54dh87h99hdhb6h645h5cbh688h8fh5fh685h689h589hfh67h668h97h656q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pth77,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(0df43a11-4442-4371-bdb3-b49610cefdc0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.453995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbgdb\" (UniqueName: \"kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb\") pod \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454033 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config\") pod \"1079c308-67ae-4be0-903d-49b2a0f0aa59\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454075 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key\") pod \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454167 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts\") pod \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454186 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxxbc\" (UniqueName: \"kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc\") pod \"1079c308-67ae-4be0-903d-49b2a0f0aa59\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454208 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs\") pod \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454305 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle\") pod \"1079c308-67ae-4be0-903d-49b2a0f0aa59\" (UID: \"1079c308-67ae-4be0-903d-49b2a0f0aa59\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.454331 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data\") pod \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\" (UID: \"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6\") " Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.459182 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data" (OuterVolumeSpecName: "config-data") pod "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" (UID: "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.459321 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb" (OuterVolumeSpecName: "kube-api-access-jbgdb") pod "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" (UID: "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6"). InnerVolumeSpecName "kube-api-access-jbgdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.459832 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts" (OuterVolumeSpecName: "scripts") pod "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" (UID: "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.460106 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs" (OuterVolumeSpecName: "logs") pod "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" (UID: "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.463597 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc" (OuterVolumeSpecName: "kube-api-access-zxxbc") pod "1079c308-67ae-4be0-903d-49b2a0f0aa59" (UID: "1079c308-67ae-4be0-903d-49b2a0f0aa59"). InnerVolumeSpecName "kube-api-access-zxxbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.463706 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" (UID: "bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.476583 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config" (OuterVolumeSpecName: "config") pod "1079c308-67ae-4be0-903d-49b2a0f0aa59" (UID: "1079c308-67ae-4be0-903d-49b2a0f0aa59"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.481847 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1079c308-67ae-4be0-903d-49b2a0f0aa59" (UID: "1079c308-67ae-4be0-903d-49b2a0f0aa59"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555704 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxxbc\" (UniqueName: \"kubernetes.io/projected/1079c308-67ae-4be0-903d-49b2a0f0aa59-kube-api-access-zxxbc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555739 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555751 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555761 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555771 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbgdb\" (UniqueName: \"kubernetes.io/projected/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-kube-api-access-jbgdb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555783 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1079c308-67ae-4be0-903d-49b2a0f0aa59-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555792 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.555803 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.659731 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e0f1b52-714c-49f2-a734-30de1ef5c0d4","Type":"ContainerDied","Data":"7665c3f760e39783bb27ce808ee1cecdfdfa95e535de4bb658da4a4f75501cf5"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.659789 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.659801 4747 scope.go:117] "RemoveContainer" containerID="342f058c107e925c8426427eaaf05936a77b6725a60b4c27acfb67384778920b" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.661472 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dcdbf6f-bvzkf" event={"ID":"12c81b21-3f11-4c58-b1ee-a23cf95e4db1","Type":"ContainerDied","Data":"72c6affa8e5dd89821d5c454af0fdfafbaef0bdfb023b02dfaa12a71156f03f7"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.661550 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7dcdbf6f-bvzkf" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.663412 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65cfbfbb97-gsrk6" event={"ID":"bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6","Type":"ContainerDied","Data":"706c1975454c3ed8eff3c6bc936e1a8f14ee367a3805d1e45bca7d16f22b2e45"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.663525 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65cfbfbb97-gsrk6" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.665240 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vk6l" event={"ID":"1079c308-67ae-4be0-903d-49b2a0f0aa59","Type":"ContainerDied","Data":"3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.665277 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ada819319b546301286f1e59a5f3badcd9f31a98f4d8945b10f88fb139e798c" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.665344 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2vk6l" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.667643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8bbcc3f0-dc53-4b02-a05c-d57a54874477","Type":"ContainerDied","Data":"43fc6afb926cc8b4ec3fd590250468e0162dff9676e79fd237673f1e1a6941ef"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.667696 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.669437 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" event={"ID":"6ad6076b-9427-4d7c-8219-357cd2e45b4b","Type":"ContainerDied","Data":"a0ea834c9eda4dce074933060422e2253230de3d33d06c14c3315be2f5b9045e"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.669483 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.670394 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f57c69f85-bvxh8" event={"ID":"98633938-4be5-489e-8e19-b23502ef4a40","Type":"ContainerDied","Data":"a55ddc1d2880ad03dbe13ed9428263121bb0b00ca7160aa1669ee262ae3e105d"} Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.670434 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-f57c69f85-bvxh8" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.692215 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.710117 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.736493 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737457 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1079c308-67ae-4be0-903d-49b2a0f0aa59" containerName="neutron-db-sync" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737483 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1079c308-67ae-4be0-903d-49b2a0f0aa59" containerName="neutron-db-sync" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737502 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737510 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737528 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737534 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737564 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737573 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737588 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="init" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737596 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="init" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737606 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737613 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: E0202 09:12:00.737631 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737637 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737880 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737914 4747 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737926 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737950 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1079c308-67ae-4be0-903d-49b2a0f0aa59" containerName="neutron-db-sync" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737964 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" containerName="glance-log" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.737973 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" containerName="glance-httpd" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.739280 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.741457 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.742126 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2v6s8" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.746198 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.746289 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.753698 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.815609 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.825036 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7dcdbf6f-bvzkf"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.840724 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.847044 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-f57c69f85-bvxh8"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.853594 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.861104 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-c74xs"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862290 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862336 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862374 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862403 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862480 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc5nj\" (UniqueName: \"kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.862531 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.874818 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.880737 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-65cfbfbb97-gsrk6"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.885359 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.893986 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.909057 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.910877 4747 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.913834 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.915546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.916492 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.966808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.966884 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.966916 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.966957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.966986 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967007 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967062 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc5nj\" (UniqueName: \"kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj\") pod 
\"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967713 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967755 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.967829 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.976562 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.977189 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.978378 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.990188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:00 crc kubenswrapper[4747]: I0202 09:12:00.995674 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc5nj\" (UniqueName: \"kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.013216 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " pod="openstack/glance-default-internal-api-0" Feb 02 
09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.061449 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068718 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068773 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068797 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068893 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.068999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.069020 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqp6p\" (UniqueName: \"kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.069462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.170567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.170643 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.170671 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqp6p\" (UniqueName: \"kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.171355 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.171436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.171479 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.171515 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.171559 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.172199 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.172227 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs\") pod 
\"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.172481 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.186918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.187417 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.187589 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.199708 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqp6p\" (UniqueName: \"kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.221121 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.223146 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.232680 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.485290 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.486917 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.502781 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585404 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585464 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc9qz\" (UniqueName: \"kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585499 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585518 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585615 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.585632 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.616874 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.618569 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.627717 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wdwjv" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.627974 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.628135 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.628374 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.645556 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.690789 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.690913 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnppm\" (UniqueName: \"kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691039 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691085 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691141 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: 
\"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc9qz\" (UniqueName: \"kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691255 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691283 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.691311 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.692313 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.693193 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.693721 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.693807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.694144 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 
09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.715685 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc9qz\" (UniqueName: \"kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz\") pod \"dnsmasq-dns-84b966f6c9-j22q5\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.792442 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.792530 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.792566 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnppm\" (UniqueName: \"kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.792617 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.792647 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.797816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.798792 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.799202 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.799712 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.810613 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnppm\" (UniqueName: \"kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm\") pod \"neutron-55c4f4d6c6-jtktd\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.815235 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:01 crc kubenswrapper[4747]: I0202 09:12:01.949411 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.356256 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e0f1b52-714c-49f2-a734-30de1ef5c0d4" path="/var/lib/kubelet/pods/0e0f1b52-714c-49f2-a734-30de1ef5c0d4/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.357316 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12c81b21-3f11-4c58-b1ee-a23cf95e4db1" path="/var/lib/kubelet/pods/12c81b21-3f11-4c58-b1ee-a23cf95e4db1/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.357753 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" path="/var/lib/kubelet/pods/6ad6076b-9427-4d7c-8219-357cd2e45b4b/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.359180 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bbcc3f0-dc53-4b02-a05c-d57a54874477" path="/var/lib/kubelet/pods/8bbcc3f0-dc53-4b02-a05c-d57a54874477/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.360204 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98633938-4be5-489e-8e19-b23502ef4a40" path="/var/lib/kubelet/pods/98633938-4be5-489e-8e19-b23502ef4a40/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.360732 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6" path="/var/lib/kubelet/pods/bc31ebdd-a4ef-496d-92d6-7ec14d09e9a6/volumes" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.680233 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.753396 4747 scope.go:117] "RemoveContainer" containerID="0b6e02fe0a534308f5085acf3ad53798a1102dc1b283307aff7170116c198c39" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.757995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vxw9\" (UniqueName: \"kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9\") pod \"e35ebb79-3f29-4695-af28-732b49057cfa\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.758117 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities\") pod \"e35ebb79-3f29-4695-af28-732b49057cfa\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.759184 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content\") pod \"e35ebb79-3f29-4695-af28-732b49057cfa\" (UID: \"e35ebb79-3f29-4695-af28-732b49057cfa\") " Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.773744 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities" (OuterVolumeSpecName: "utilities") pod "e35ebb79-3f29-4695-af28-732b49057cfa" (UID: "e35ebb79-3f29-4695-af28-732b49057cfa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.779885 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9" (OuterVolumeSpecName: "kube-api-access-5vxw9") pod "e35ebb79-3f29-4695-af28-732b49057cfa" (UID: "e35ebb79-3f29-4695-af28-732b49057cfa"). InnerVolumeSpecName "kube-api-access-5vxw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.814177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b58ss" event={"ID":"e35ebb79-3f29-4695-af28-732b49057cfa","Type":"ContainerDied","Data":"374586f7eaf4c4a7099f7987d2f6810e13dfb4864d39d93ccad40a553b8618c7"} Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.814219 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b58ss" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.823381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e35ebb79-3f29-4695-af28-732b49057cfa" (UID: "e35ebb79-3f29-4695-af28-732b49057cfa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.868770 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vxw9\" (UniqueName: \"kubernetes.io/projected/e35ebb79-3f29-4695-af28-732b49057cfa-kube-api-access-5vxw9\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.868806 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.868818 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e35ebb79-3f29-4695-af28-732b49057cfa-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.910992 4747 scope.go:117] "RemoveContainer" containerID="3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5" Feb 02 09:12:02 crc kubenswrapper[4747]: E0202 09:12:02.921909 4747 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/glance-default-external-api-0_openstack_glance-httpd-3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5.log: no such file or directory" path="/var/log/containers/glance-default-external-api-0_openstack_glance-httpd-3ed7fe292e21441ba342c247adcd5a75f3e2a4b6d9b3037b1b46eb683c9a40e5.log" Feb 02 09:12:02 crc kubenswrapper[4747]: I0202 09:12:02.989196 4747 scope.go:117] "RemoveContainer" containerID="f4ed110df3c22f3e6c05f3c6df1845789ec3fc836f6df0cba00de97f60c379fe" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.059446 4747 scope.go:117] "RemoveContainer" containerID="2a15e11eea5e8c2b20655968369371d898a8496fad6a614e52ceccfd07d54791" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.102586 4747 scope.go:117] "RemoveContainer" containerID="5c11dd1d7a6bec2e5eaa3f10427288deb78c985e5e505ffb3ff94112c934e436" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.137526 4747 scope.go:117] "RemoveContainer" containerID="d171b71c5c2623728ea715cc710d15de8b12fd8d99f9560093f8dc5d07435149" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.168598 4747 scope.go:117] "RemoveContainer" containerID="0a5008083d60401a9a812da56ee73da01bdedecdd7b5533f2d4e8407c4e999fd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.176762 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.185212 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b58ss"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.188304 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b946c75cc-c74xs" podUID="6ad6076b-9427-4d7c-8219-357cd2e45b4b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.203724 4747 scope.go:117] "RemoveContainer" containerID="80870d765b89b8d9deff47e18d2833e0d8db4d124169452a2c5683ebd7d338e4" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.284263 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:12:03 crc kubenswrapper[4747]: W0202 09:12:03.286199 4747 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e1bfdd3_f021_48b0_bf1e_98c03a5d99ae.slice/crio-c082ab8faf10c0263c6de1abd51fc6d573476ff026bc934eb1e51cb040fab256 WatchSource:0}: Error finding container c082ab8faf10c0263c6de1abd51fc6d573476ff026bc934eb1e51cb040fab256: Status 404 returned error can't find the container with id c082ab8faf10c0263c6de1abd51fc6d573476ff026bc934eb1e51cb040fab256 Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.292486 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7495bf65bd-857k2"] Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.326415 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.326604 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pv9m4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-sh4ft_openstack(11768358-bd3c-440c-ac71-1c1ad4436571): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:12:03 
crc kubenswrapper[4747]: E0202 09:12:03.327753 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-sh4ft" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.635861 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.636832 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.636916 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.637025 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="extract-utilities" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.637100 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="extract-utilities" Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.637204 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="extract-content" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.637270 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="extract-content" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.637548 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" containerName="registry-server" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.639177 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.646107 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.646616 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.655848 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.681181 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tmgr2"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.690768 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.691035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.691180 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.691332 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.691533 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.694482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.694904 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcfmr\" (UniqueName: \"kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc 
kubenswrapper[4747]: W0202 09:12:03.698740 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73275940_43ea_40ea_ba5f_5b8e25a35f62.slice/crio-9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7 WatchSource:0}: Error finding container 9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7: Status 404 returned error can't find the container with id 9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7 Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.700100 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.717219 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.727299 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.750321 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796542 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796591 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796614 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796662 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796721 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796754 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.796782 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-xcfmr\" (UniqueName: \"kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.801918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.804855 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.804918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.805054 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.805618 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.806476 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.819971 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcfmr\" (UniqueName: \"kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr\") pod \"neutron-7b4dbd8489-wjfdd\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.832568 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7pc6t" event={"ID":"b3486730-26af-4fe1-a379-b28af74eb1e8","Type":"ContainerStarted","Data":"5e7f6ea423e81d9a05cfb8f9a49014a60051694136eb02e7aae65a77a804dc38"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.834788 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tmgr2" event={"ID":"73275940-43ea-40ea-ba5f-5b8e25a35f62","Type":"ContainerStarted","Data":"9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7"} Feb 02 09:12:03 crc 
kubenswrapper[4747]: I0202 09:12:03.835903 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7495bf65bd-857k2" event={"ID":"2860dde6-602b-417e-9819-6ce526ed2eb9","Type":"ContainerStarted","Data":"56da49528feb813826928439ce0612a0046bd7d9922476bced4d0e059f14be7f"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.840243 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.853481 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kcg7d" event={"ID":"2551c83c-96c3-43d9-916d-04bf8bbaf85a","Type":"ContainerStarted","Data":"67bef59bf70c65c9d9b3820186fcb391a2c5d4ea690f558d792d64c05cea4ad7"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.856695 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerStarted","Data":"e088edf1f2464df10d6c55b3893d4c795a1b08e4ac79d509b47b0738e9afe7d5"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.858037 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerStarted","Data":"ad1f442fc414e7a5f9cae8d5279d55098303585581bba8973173c99a72f3ecab"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.865297 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" event={"ID":"cf03e9ce-2830-49eb-ac1e-cacacd78e942","Type":"ContainerStarted","Data":"cf64dd4376b9c8bfa6c49d919463613f7ac2706f0bdea9edfcde7603b4c47476"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.868788 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7pc6t" podStartSLOduration=6.24425445 podStartE2EDuration="34.868760186s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="2026-02-02 09:11:31.148216351 +0000 UTC m=+903.692554784" lastFinishedPulling="2026-02-02 09:11:59.772722087 +0000 UTC m=+932.317060520" observedRunningTime="2026-02-02 09:12:03.85337526 +0000 UTC m=+936.397713693" watchObservedRunningTime="2026-02-02 09:12:03.868760186 +0000 UTC m=+936.413098619" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.875092 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerStarted","Data":"c082ab8faf10c0263c6de1abd51fc6d573476ff026bc934eb1e51cb040fab256"} Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.881704 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-kcg7d" podStartSLOduration=2.826108076 podStartE2EDuration="34.881685689s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="2026-02-02 09:11:31.340892077 +0000 UTC m=+903.885230510" lastFinishedPulling="2026-02-02 09:12:03.39646969 +0000 UTC m=+935.940808123" observedRunningTime="2026-02-02 09:12:03.875577476 +0000 UTC m=+936.419915919" watchObservedRunningTime="2026-02-02 09:12:03.881685689 +0000 UTC m=+936.426024122" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.899495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerStarted","Data":"9f386dfe6401ee5844422d788ecab18505eeddf2662f5fb0ed36df96c395d832"} Feb 02 09:12:03 crc kubenswrapper[4747]: E0202 09:12:03.914115 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-sh4ft" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" Feb 02 09:12:03 crc kubenswrapper[4747]: I0202 09:12:03.982869 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.355824 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e35ebb79-3f29-4695-af28-732b49057cfa" path="/var/lib/kubelet/pods/e35ebb79-3f29-4695-af28-732b49057cfa/volumes" Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.691534 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.919531 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tmgr2" event={"ID":"73275940-43ea-40ea-ba5f-5b8e25a35f62","Type":"ContainerStarted","Data":"73e7d55ef5533157a939da1439e9ccee720d4a1de5cfb2b5cfdbf9d1d547ba47"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.921880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerStarted","Data":"bacb4070c9ab5dcf38f94f503edf06993f33b0fd01c73bead2f6865062fb06b1"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.924860 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerStarted","Data":"0c10e955cfb68a859c2eab061a117c62ec17f54ace01c3558062a140c06db725"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.927037 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerStarted","Data":"9016c0851c6fdf14fa1ff74e3f1252089083a7580b71dfe23376fb41cb220426"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.932575 4747 generic.go:334] "Generic (PLEG): container finished" podID="07972a25-4956-4f3d-b9be-0c555b1906df" containerID="173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1" exitCode=0 Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.932680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerDied","Data":"173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.944677 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerStarted","Data":"a0c42adcbe1996d24d8aebea7daa350ea0b388498a4ddae2b820374337d92f36"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.954480 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tmgr2" podStartSLOduration=14.95445539 podStartE2EDuration="14.95445539s" podCreationTimestamp="2026-02-02 
09:11:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:04.941010373 +0000 UTC m=+937.485348816" watchObservedRunningTime="2026-02-02 09:12:04.95445539 +0000 UTC m=+937.498793823" Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.958807 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerStarted","Data":"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623"} Feb 02 09:12:04 crc kubenswrapper[4747]: I0202 09:12:04.959039 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerStarted","Data":"143bc1289bd91924df2610001740ca9eced88ec813732e26ea6f5d0a300299e9"} Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.019295 4747 generic.go:334] "Generic (PLEG): container finished" podID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerID="aa884b2387b02a99dc9fd910a3d43a3b61fbf82288993573d87ba79303fa64ca" exitCode=0 Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.019383 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" event={"ID":"cf03e9ce-2830-49eb-ac1e-cacacd78e942","Type":"ContainerDied","Data":"aa884b2387b02a99dc9fd910a3d43a3b61fbf82288993573d87ba79303fa64ca"} Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.039656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7495bf65bd-857k2" event={"ID":"2860dde6-602b-417e-9819-6ce526ed2eb9","Type":"ContainerStarted","Data":"f7443913509b1d1fc8ae368bed4aab423e61a1b56cc666223497e8d462839b9a"} Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.780564 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.782586 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.817989 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.861619 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.861748 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.862251 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvdrx\" (UniqueName: \"kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.963398 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.963465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvdrx\" (UniqueName: \"kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.963491 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.963924 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.964142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:05 crc kubenswrapper[4747]: I0202 09:12:05.989065 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gvdrx\" (UniqueName: \"kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx\") pod \"redhat-operators-m5674\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.057081 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" event={"ID":"cf03e9ce-2830-49eb-ac1e-cacacd78e942","Type":"ContainerStarted","Data":"938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.057335 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.061554 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7495bf65bd-857k2" event={"ID":"2860dde6-602b-417e-9819-6ce526ed2eb9","Type":"ContainerStarted","Data":"6509828b0b0d9a8924bab9eb6c0658ca6a343b9381c03230f3547affad1cad4e"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.064842 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerStarted","Data":"640cd96beda6a2402b48a48c7eba3997597527536cc9ad3d969289503765fbd3"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.064880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerStarted","Data":"4b1c422fc70e0e5f80cae252a22d70243036db3e120b0f9d689ee56ea9c54126"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.064978 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.069972 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerStarted","Data":"19f562aec086b19976fada025d9bf69ae03fcca6e3f2d994e785fbf3c3ce5db6"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.073293 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerStarted","Data":"b58beb9bccf8377ae3190cb61f1add3bdb034c8ba31eebd73a6e7366b3cb3328"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.076880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerStarted","Data":"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3"} Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.080603 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" podStartSLOduration=5.080586027 podStartE2EDuration="5.080586027s" podCreationTimestamp="2026-02-02 09:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:06.075881279 +0000 UTC m=+938.620219712" watchObservedRunningTime="2026-02-02 09:12:06.080586027 +0000 UTC m=+938.624924460" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.105716 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7495bf65bd-857k2" 
podStartSLOduration=27.481453456 podStartE2EDuration="28.105695076s" podCreationTimestamp="2026-02-02 09:11:38 +0000 UTC" firstStartedPulling="2026-02-02 09:12:03.28983946 +0000 UTC m=+935.834177893" lastFinishedPulling="2026-02-02 09:12:03.91408108 +0000 UTC m=+936.458419513" observedRunningTime="2026-02-02 09:12:06.098066225 +0000 UTC m=+938.642404658" watchObservedRunningTime="2026-02-02 09:12:06.105695076 +0000 UTC m=+938.650033509" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.127319 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7b4dbd8489-wjfdd" podStartSLOduration=3.1273042269999998 podStartE2EDuration="3.127304227s" podCreationTimestamp="2026-02-02 09:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:06.123358958 +0000 UTC m=+938.667697391" watchObservedRunningTime="2026-02-02 09:12:06.127304227 +0000 UTC m=+938.671642660" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.156703 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-69bf4987b8-zq2rd" podStartSLOduration=27.529213541 podStartE2EDuration="28.156681672s" podCreationTimestamp="2026-02-02 09:11:38 +0000 UTC" firstStartedPulling="2026-02-02 09:12:03.288778744 +0000 UTC m=+935.833117177" lastFinishedPulling="2026-02-02 09:12:03.916246875 +0000 UTC m=+936.460585308" observedRunningTime="2026-02-02 09:12:06.145380699 +0000 UTC m=+938.689719132" watchObservedRunningTime="2026-02-02 09:12:06.156681672 +0000 UTC m=+938.701020105" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.179498 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.179596 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55c4f4d6c6-jtktd" podStartSLOduration=5.179574706 podStartE2EDuration="5.179574706s" podCreationTimestamp="2026-02-02 09:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:06.174895988 +0000 UTC m=+938.719234441" watchObservedRunningTime="2026-02-02 09:12:06.179574706 +0000 UTC m=+938.723913139" Feb 02 09:12:06 crc kubenswrapper[4747]: I0202 09:12:06.797655 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.090797 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerID="305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6" exitCode=0 Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.091011 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerDied","Data":"305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.091069 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerStarted","Data":"e7ccfebaca3418990d3c1725e56b37b03cfaff70f05b26112fa9d79f51da42d3"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.100131 4747 generic.go:334] "Generic (PLEG): 
container finished" podID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" containerID="67bef59bf70c65c9d9b3820186fcb391a2c5d4ea690f558d792d64c05cea4ad7" exitCode=0 Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.100225 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kcg7d" event={"ID":"2551c83c-96c3-43d9-916d-04bf8bbaf85a","Type":"ContainerDied","Data":"67bef59bf70c65c9d9b3820186fcb391a2c5d4ea690f558d792d64c05cea4ad7"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.103303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerStarted","Data":"7f3b1a103d81886ff14d2f4ae28b712738e8be05cf6adf32319381795cead84b"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.113596 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerStarted","Data":"09803ea36b706f383f5d28af6229736fc7052fab541590b73e91c1177bda5d40"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.119050 4747 generic.go:334] "Generic (PLEG): container finished" podID="07972a25-4956-4f3d-b9be-0c555b1906df" containerID="a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1" exitCode=0 Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.119207 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerDied","Data":"a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1"} Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.120455 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.153640 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.153622304 podStartE2EDuration="7.153622304s" podCreationTimestamp="2026-02-02 09:12:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:07.153456099 +0000 UTC m=+939.697794522" watchObservedRunningTime="2026-02-02 09:12:07.153622304 +0000 UTC m=+939.697960737" Feb 02 09:12:07 crc kubenswrapper[4747]: I0202 09:12:07.197303 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.197285147 podStartE2EDuration="7.197285147s" podCreationTimestamp="2026-02-02 09:12:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:07.190396514 +0000 UTC m=+939.734734947" watchObservedRunningTime="2026-02-02 09:12:07.197285147 +0000 UTC m=+939.741623580" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.151924 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerStarted","Data":"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98"} Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.166875 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3486730-26af-4fe1-a379-b28af74eb1e8" containerID="5e7f6ea423e81d9a05cfb8f9a49014a60051694136eb02e7aae65a77a804dc38" 
exitCode=0 Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.167783 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7pc6t" event={"ID":"b3486730-26af-4fe1-a379-b28af74eb1e8","Type":"ContainerDied","Data":"5e7f6ea423e81d9a05cfb8f9a49014a60051694136eb02e7aae65a77a804dc38"} Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.205301 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ccgcp" podStartSLOduration=17.597975963 podStartE2EDuration="20.205284116s" podCreationTimestamp="2026-02-02 09:11:48 +0000 UTC" firstStartedPulling="2026-02-02 09:12:04.985096997 +0000 UTC m=+937.529435430" lastFinishedPulling="2026-02-02 09:12:07.59240511 +0000 UTC m=+940.136743583" observedRunningTime="2026-02-02 09:12:08.183043589 +0000 UTC m=+940.727382022" watchObservedRunningTime="2026-02-02 09:12:08.205284116 +0000 UTC m=+940.749622539" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.530478 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.530764 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.595963 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kcg7d" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.663998 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.664170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.666405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle\") pod \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.666475 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnlbp\" (UniqueName: \"kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp\") pod \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.666559 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs\") pod \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.666776 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data\") pod \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.666806 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts\") pod 
\"2551c83c-96c3-43d9-916d-04bf8bbaf85a\" (UID: \"2551c83c-96c3-43d9-916d-04bf8bbaf85a\") " Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.673189 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs" (OuterVolumeSpecName: "logs") pod "2551c83c-96c3-43d9-916d-04bf8bbaf85a" (UID: "2551c83c-96c3-43d9-916d-04bf8bbaf85a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.687188 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp" (OuterVolumeSpecName: "kube-api-access-vnlbp") pod "2551c83c-96c3-43d9-916d-04bf8bbaf85a" (UID: "2551c83c-96c3-43d9-916d-04bf8bbaf85a"). InnerVolumeSpecName "kube-api-access-vnlbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.689063 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts" (OuterVolumeSpecName: "scripts") pod "2551c83c-96c3-43d9-916d-04bf8bbaf85a" (UID: "2551c83c-96c3-43d9-916d-04bf8bbaf85a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.753083 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data" (OuterVolumeSpecName: "config-data") pod "2551c83c-96c3-43d9-916d-04bf8bbaf85a" (UID: "2551c83c-96c3-43d9-916d-04bf8bbaf85a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.769070 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.769100 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnlbp\" (UniqueName: \"kubernetes.io/projected/2551c83c-96c3-43d9-916d-04bf8bbaf85a-kube-api-access-vnlbp\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.769113 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2551c83c-96c3-43d9-916d-04bf8bbaf85a-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.769121 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.791122 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2551c83c-96c3-43d9-916d-04bf8bbaf85a" (UID: "2551c83c-96c3-43d9-916d-04bf8bbaf85a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.870312 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2551c83c-96c3-43d9-916d-04bf8bbaf85a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.875487 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:12:08 crc kubenswrapper[4747]: I0202 09:12:08.876280 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.187483 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerStarted","Data":"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab"} Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.193366 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kcg7d" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.195002 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kcg7d" event={"ID":"2551c83c-96c3-43d9-916d-04bf8bbaf85a","Type":"ContainerDied","Data":"03b786ca48e3acbefc0ee5b1535e102bfa1ce0f2358be2136bb5f3779423d091"} Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.195046 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03b786ca48e3acbefc0ee5b1535e102bfa1ce0f2358be2136bb5f3779423d091" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.237996 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:12:09 crc kubenswrapper[4747]: E0202 09:12:09.238394 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" containerName="placement-db-sync" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.238409 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" containerName="placement-db-sync" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.238598 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" containerName="placement-db-sync" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.239512 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.241725 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-xfd9p" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.241914 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.241917 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.242194 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.249194 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.276634 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.378841 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379123 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379159 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj7gz\" (UniqueName: \"kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379182 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379601 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.379638 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.481900 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.481971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.481995 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj7gz\" (UniqueName: \"kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.482035 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.482084 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.482118 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.482135 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.483318 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.487741 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.488233 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.488406 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.491188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.505799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.510491 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj7gz\" (UniqueName: \"kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz\") pod \"placement-56bc7d5584-2ln7s\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.575356 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.669815 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.759343 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-ccgcp" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:09 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:09 crc kubenswrapper[4747]: > Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.786355 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx2xp\" (UniqueName: \"kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp\") pod \"b3486730-26af-4fe1-a379-b28af74eb1e8\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.786563 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data\") pod \"b3486730-26af-4fe1-a379-b28af74eb1e8\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.786598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle\") pod \"b3486730-26af-4fe1-a379-b28af74eb1e8\" (UID: \"b3486730-26af-4fe1-a379-b28af74eb1e8\") " Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.797715 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b3486730-26af-4fe1-a379-b28af74eb1e8" (UID: "b3486730-26af-4fe1-a379-b28af74eb1e8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.797823 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp" (OuterVolumeSpecName: "kube-api-access-lx2xp") pod "b3486730-26af-4fe1-a379-b28af74eb1e8" (UID: "b3486730-26af-4fe1-a379-b28af74eb1e8"). InnerVolumeSpecName "kube-api-access-lx2xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.836048 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3486730-26af-4fe1-a379-b28af74eb1e8" (UID: "b3486730-26af-4fe1-a379-b28af74eb1e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.914690 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.914985 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3486730-26af-4fe1-a379-b28af74eb1e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.915121 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx2xp\" (UniqueName: \"kubernetes.io/projected/b3486730-26af-4fe1-a379-b28af74eb1e8-kube-api-access-lx2xp\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:09 crc kubenswrapper[4747]: I0202 09:12:09.972473 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.205905 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerStarted","Data":"892d754c334821579b9ca20ccde85cea1e68dcb30b98ab8d2595f96970eeb1ba"} Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.210798 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7pc6t" event={"ID":"b3486730-26af-4fe1-a379-b28af74eb1e8","Type":"ContainerDied","Data":"9674817cc03b0ae7edce2dd028da029611818cd616e3e044d864f93235a7e17e"} Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.210852 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9674817cc03b0ae7edce2dd028da029611818cd616e3e044d864f93235a7e17e" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.210918 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7pc6t" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.406831 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:10 crc kubenswrapper[4747]: E0202 09:12:10.407208 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3486730-26af-4fe1-a379-b28af74eb1e8" containerName="barbican-db-sync" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.407224 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3486730-26af-4fe1-a379-b28af74eb1e8" containerName="barbican-db-sync" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.407462 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3486730-26af-4fe1-a379-b28af74eb1e8" containerName="barbican-db-sync" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.417464 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.417581 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.426003 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.427545 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.437118 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mjpkv" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.437609 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.437777 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.437835 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.479765 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.491101 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.491728 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" containerID="cri-o://938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996" gracePeriod=10 Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.501091 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539327 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539380 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539399 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxfpz\" (UniqueName: \"kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539481 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539515 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539533 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.539562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p72r8\" (UniqueName: \"kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.555140 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.556663 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.560959 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.644269 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.644323 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.645860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.647796 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p72r8\" (UniqueName: \"kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.647850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92xjw\" (UniqueName: \"kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.647912 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.647953 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.647992 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.648023 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.648089 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.648131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.648200 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxfpz\" (UniqueName: \"kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.648313 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.655848 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.656404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.656506 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.656548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " 
pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.656588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.656631 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.665546 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.666348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.668704 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.678760 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.679826 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.684870 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.686319 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.691059 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxfpz\" (UniqueName: \"kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz\") pod \"barbican-keystone-listener-6f57f97874-kjzqz\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.695870 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.696862 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p72r8\" (UniqueName: \"kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8\") pod \"barbican-worker-848476d6cc-r4k78\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.699228 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759081 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759237 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92xjw\" (UniqueName: \"kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759622 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.759658 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " 
pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.760632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.760898 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.761241 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.761737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.764749 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.781718 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92xjw\" (UniqueName: \"kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw\") pod \"dnsmasq-dns-75c8ddd69c-6f5vq\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.788595 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.819106 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.861650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.861728 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.861808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.861833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.861892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcwp7\" (UniqueName: \"kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.892728 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.963364 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.963423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.963486 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.963505 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.963550 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcwp7\" (UniqueName: \"kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.964074 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.968817 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.972866 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc kubenswrapper[4747]: I0202 09:12:10.972929 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:10 crc 
kubenswrapper[4747]: I0202 09:12:10.981260 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcwp7\" (UniqueName: \"kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7\") pod \"barbican-api-85b5b7d9cb-m78r5\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.063399 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.063473 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.087795 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.112149 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.132401 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.233621 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.233687 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerStarted","Data":"1ed1f71086fdf558823b3b1b837d1ccdda87cdb4bb6f84eeec5707e072c14cdc"} Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.233719 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.250468 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerID="e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab" exitCode=0 Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.250563 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerDied","Data":"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab"} Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.251705 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.251734 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.294630 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: I0202 09:12:11.322485 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 09:12:11 crc kubenswrapper[4747]: E0202 09:12:11.678408 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf03e9ce_2830_49eb_ac1e_cacacd78e942.slice/crio-conmon-938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996.scope\": RecentStats: unable to find data in memory cache]" Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.266271 4747 generic.go:334] "Generic (PLEG): container finished" podID="73275940-43ea-40ea-ba5f-5b8e25a35f62" containerID="73e7d55ef5533157a939da1439e9ccee720d4a1de5cfb2b5cfdbf9d1d547ba47" exitCode=0 Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.266333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tmgr2" event={"ID":"73275940-43ea-40ea-ba5f-5b8e25a35f62","Type":"ContainerDied","Data":"73e7d55ef5533157a939da1439e9ccee720d4a1de5cfb2b5cfdbf9d1d547ba47"} Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.271422 4747 generic.go:334] "Generic (PLEG): container finished" podID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerID="938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996" exitCode=0 Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.271487 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" event={"ID":"cf03e9ce-2830-49eb-ac1e-cacacd78e942","Type":"ContainerDied","Data":"938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996"} Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.272123 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 09:12:12 crc kubenswrapper[4747]: I0202 09:12:12.272152 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.246770 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.248915 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.250796 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.251943 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.266962 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.280433 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.280460 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.320991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.321138 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.321207 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.322242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.322702 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b47hj\" (UniqueName: \"kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.322750 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.322841 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.423830 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.423890 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.423919 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.423995 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b47hj\" (UniqueName: \"kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.424013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.424042 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.424067 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.425422 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.433325 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.434319 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.434556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.434633 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.443801 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.446142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b47hj\" (UniqueName: \"kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj\") pod \"barbican-api-569bd9644b-d92kb\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:13 crc kubenswrapper[4747]: I0202 09:12:13.588513 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:14 crc kubenswrapper[4747]: I0202 09:12:14.163321 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:14 crc kubenswrapper[4747]: I0202 09:12:14.311945 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 09:12:14 crc kubenswrapper[4747]: I0202 09:12:14.868653 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 09:12:15 crc kubenswrapper[4747]: I0202 09:12:15.033697 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 09:12:15 crc kubenswrapper[4747]: I0202 09:12:15.033845 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 09:12:15 crc kubenswrapper[4747]: I0202 09:12:15.034591 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 09:12:16 crc kubenswrapper[4747]: I0202 09:12:16.816292 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: i/o timeout" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.316402 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.362654 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.365514 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tmgr2" event={"ID":"73275940-43ea-40ea-ba5f-5b8e25a35f62","Type":"ContainerDied","Data":"9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7"} Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.378135 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cdc633c9dbeeffceb55582af31f2f1bc891c107d3774effda5c1f1bfe8446d7" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452081 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452523 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc9qz\" (UniqueName: \"kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452601 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452647 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" event={"ID":"cf03e9ce-2830-49eb-ac1e-cacacd78e942","Type":"ContainerDied","Data":"cf64dd4376b9c8bfa6c49d919463613f7ac2706f0bdea9edfcde7603b4c47476"} Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452772 4747 scope.go:117] "RemoveContainer" containerID="938a8c54d2cc7d673819a7aca88270299626df03164ec37a87cb5db30e589996" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452777 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.452841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config\") pod \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\" (UID: \"cf03e9ce-2830-49eb-ac1e-cacacd78e942\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.453005 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.459648 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz" (OuterVolumeSpecName: "kube-api-access-wc9qz") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: "cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "kube-api-access-wc9qz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.495822 4747 scope.go:117] "RemoveContainer" containerID="aa884b2387b02a99dc9fd910a3d43a3b61fbf82288993573d87ba79303fa64ca" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.512030 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: "cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.547853 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config" (OuterVolumeSpecName: "config") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: "cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.551250 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: "cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557599 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557723 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557797 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557833 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrk98\" (UniqueName: \"kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557872 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.557896 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts\") pod \"73275940-43ea-40ea-ba5f-5b8e25a35f62\" (UID: \"73275940-43ea-40ea-ba5f-5b8e25a35f62\") " Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.558464 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.558483 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.558492 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.558507 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc9qz\" (UniqueName: \"kubernetes.io/projected/cf03e9ce-2830-49eb-ac1e-cacacd78e942-kube-api-access-wc9qz\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.558805 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: 
"cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.563082 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.563219 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98" (OuterVolumeSpecName: "kube-api-access-qrk98") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "kube-api-access-qrk98". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.563833 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cf03e9ce-2830-49eb-ac1e-cacacd78e942" (UID: "cf03e9ce-2830-49eb-ac1e-cacacd78e942"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.578055 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.578131 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts" (OuterVolumeSpecName: "scripts") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.591779 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.597768 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data" (OuterVolumeSpecName: "config-data") pod "73275940-43ea-40ea-ba5f-5b8e25a35f62" (UID: "73275940-43ea-40ea-ba5f-5b8e25a35f62"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662190 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662235 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662246 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662256 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf03e9ce-2830-49eb-ac1e-cacacd78e942-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662268 4747 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662278 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662289 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73275940-43ea-40ea-ba5f-5b8e25a35f62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.662305 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrk98\" (UniqueName: \"kubernetes.io/projected/73275940-43ea-40ea-ba5f-5b8e25a35f62-kube-api-access-qrk98\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.727104 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:17 crc kubenswrapper[4747]: W0202 09:12:17.733721 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54c1d27f_e96c_425c_8b4c_08e7824abbf1.slice/crio-f7656bc1134d60a2b3d2b88a56168b57e0385aa5cd77c9b18a920603c06bddd9 WatchSource:0}: Error finding container f7656bc1134d60a2b3d2b88a56168b57e0385aa5cd77c9b18a920603c06bddd9: Status 404 returned error can't find the container with id f7656bc1134d60a2b3d2b88a56168b57e0385aa5cd77c9b18a920603c06bddd9 Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.733880 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:17 crc kubenswrapper[4747]: W0202 09:12:17.735504 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda32243a8_81da_4d79_927e_413df2383bd7.slice/crio-832b0255e2095e3ff06b90271b1359ac05eab348c5358a9fa6ce5884705a4c91 WatchSource:0}: Error finding container 832b0255e2095e3ff06b90271b1359ac05eab348c5358a9fa6ce5884705a4c91: Status 404 returned error can't find the container with id 
832b0255e2095e3ff06b90271b1359ac05eab348c5358a9fa6ce5884705a4c91 Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.810268 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.821593 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-j22q5"] Feb 02 09:12:17 crc kubenswrapper[4747]: W0202 09:12:17.949609 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68e47ed3_c6ed_453b_a1eb_3fd9073c519c.slice/crio-4b0636762d5cf6cefc1350c45f2f795f257035b7ec9f8c35939852e19d17ecc7 WatchSource:0}: Error finding container 4b0636762d5cf6cefc1350c45f2f795f257035b7ec9f8c35939852e19d17ecc7: Status 404 returned error can't find the container with id 4b0636762d5cf6cefc1350c45f2f795f257035b7ec9f8c35939852e19d17ecc7 Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.963119 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:17 crc kubenswrapper[4747]: W0202 09:12:17.963268 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16f5c15b_20ec_4f65_b557_89867813005a.slice/crio-751d15a51319006d382fbe77378ad0ff0ca08f622546037bc3a4decab8efaed4 WatchSource:0}: Error finding container 751d15a51319006d382fbe77378ad0ff0ca08f622546037bc3a4decab8efaed4: Status 404 returned error can't find the container with id 751d15a51319006d382fbe77378ad0ff0ca08f622546037bc3a4decab8efaed4 Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.985546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:17 crc kubenswrapper[4747]: I0202 09:12:17.993596 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.355681 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" path="/var/lib/kubelet/pods/cf03e9ce-2830-49eb-ac1e-cacacd78e942/volumes" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.487876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerStarted","Data":"1cbc64e8bd6a26ba6a4e2e9a50228b0b3f2f17f44e711b4dc13457c761349fc4"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.504273 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerStarted","Data":"751d15a51319006d382fbe77378ad0ff0ca08f622546037bc3a4decab8efaed4"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.523383 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerStarted","Data":"81bf19464dd98f6e2c94065a4f95f7e014cfff962b3053b15b09759f6cd0eb5b"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.524349 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.524571 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:18 crc 
kubenswrapper[4747]: I0202 09:12:18.535190 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.549475 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-559c565ccd-md2lx"] Feb 02 09:12:18 crc kubenswrapper[4747]: E0202 09:12:18.549872 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="init" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.549883 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="init" Feb 02 09:12:18 crc kubenswrapper[4747]: E0202 09:12:18.549909 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.549916 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" Feb 02 09:12:18 crc kubenswrapper[4747]: E0202 09:12:18.549926 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73275940-43ea-40ea-ba5f-5b8e25a35f62" containerName="keystone-bootstrap" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.549959 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="73275940-43ea-40ea-ba5f-5b8e25a35f62" containerName="keystone-bootstrap" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.550128 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.550154 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="73275940-43ea-40ea-ba5f-5b8e25a35f62" containerName="keystone-bootstrap" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.550751 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.556781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.557024 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.564005 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sh4ft" event={"ID":"11768358-bd3c-440c-ac71-1c1ad4436571","Type":"ContainerStarted","Data":"f61d25e01ba47aff67e10bfa98efbe79fcc663e4dd181b3cc89ee20e7462b889"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.566880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerStarted","Data":"4b0636762d5cf6cefc1350c45f2f795f257035b7ec9f8c35939852e19d17ecc7"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.575796 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-559c565ccd-md2lx"] Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.575839 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerStarted","Data":"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.585533 4747 generic.go:334] "Generic (PLEG): container finished" podID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerID="b68ee08bd5e55733b847e220a58b4025d99dd7013615fe2da81fccd6926727c8" exitCode=0 Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.585609 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" event={"ID":"6163cdad-08d0-436e-967b-fa422f3dc4cd","Type":"ContainerDied","Data":"b68ee08bd5e55733b847e220a58b4025d99dd7013615fe2da81fccd6926727c8"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.585633 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" event={"ID":"6163cdad-08d0-436e-967b-fa422f3dc4cd","Type":"ContainerStarted","Data":"cfdfef7ed9e3c852cb4fdc37716bb7c09f92ba9503cb9c4640dbbdefc3858163"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.585761 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-56bc7d5584-2ln7s" podStartSLOduration=9.585742557 podStartE2EDuration="9.585742557s" podCreationTimestamp="2026-02-02 09:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:18.55633425 +0000 UTC m=+951.100672683" watchObservedRunningTime="2026-02-02 09:12:18.585742557 +0000 UTC m=+951.130080990" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.601790 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerStarted","Data":"59b63418f6fcd716296359e6ad3dfc5723e18cb59edc197a0d47e37da7047e06"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.602009 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" 
event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerStarted","Data":"f7656bc1134d60a2b3d2b88a56168b57e0385aa5cd77c9b18a920603c06bddd9"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.603074 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.603133 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.632972 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tmgr2" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.633083 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerStarted","Data":"7311969c15e641d1484292bb2aa2c5f540049b368724df6feb3c3555ced64982"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.633120 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerStarted","Data":"832b0255e2095e3ff06b90271b1359ac05eab348c5358a9fa6ce5884705a4c91"} Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.661539 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sh4ft" podStartSLOduration=3.874143992 podStartE2EDuration="49.661518794s" podCreationTimestamp="2026-02-02 09:11:29 +0000 UTC" firstStartedPulling="2026-02-02 09:11:31.389072199 +0000 UTC m=+903.933410632" lastFinishedPulling="2026-02-02 09:12:17.176447001 +0000 UTC m=+949.720785434" observedRunningTime="2026-02-02 09:12:18.599200454 +0000 UTC m=+951.143538887" watchObservedRunningTime="2026-02-02 09:12:18.661518794 +0000 UTC m=+951.205857227" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682498 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-internal-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682575 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-public-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682633 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-fernet-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682735 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nscpn\" (UniqueName: \"kubernetes.io/projected/693c0cdd-1115-4a68-9f3e-4437ce703788-kube-api-access-nscpn\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " 
pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682766 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-scripts\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682818 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-config-data\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.682843 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-combined-ca-bundle\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.683029 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-credential-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.686575 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m5674" podStartSLOduration=3.393564469 podStartE2EDuration="13.686555221s" podCreationTimestamp="2026-02-02 09:12:05 +0000 UTC" firstStartedPulling="2026-02-02 09:12:07.094573965 +0000 UTC m=+939.638912398" lastFinishedPulling="2026-02-02 09:12:17.387564727 +0000 UTC m=+949.931903150" observedRunningTime="2026-02-02 09:12:18.61943579 +0000 UTC m=+951.163774233" watchObservedRunningTime="2026-02-02 09:12:18.686555221 +0000 UTC m=+951.230893654" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.712621 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podStartSLOduration=8.712602403 podStartE2EDuration="8.712602403s" podCreationTimestamp="2026-02-02 09:12:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:18.665984776 +0000 UTC m=+951.210323209" watchObservedRunningTime="2026-02-02 09:12:18.712602403 +0000 UTC m=+951.256940836" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.718108 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-569bd9644b-d92kb" podStartSLOduration=5.71809146 podStartE2EDuration="5.71809146s" podCreationTimestamp="2026-02-02 09:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:18.689558646 +0000 UTC m=+951.233897099" watchObservedRunningTime="2026-02-02 09:12:18.71809146 +0000 UTC m=+951.262429893" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-internal-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785092 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-public-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785118 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-fernet-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785188 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nscpn\" (UniqueName: \"kubernetes.io/projected/693c0cdd-1115-4a68-9f3e-4437ce703788-kube-api-access-nscpn\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785214 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-scripts\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785262 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-config-data\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785287 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-combined-ca-bundle\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.785417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-credential-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.811093 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-scripts\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.813434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-fernet-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: 
\"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.815641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-combined-ca-bundle\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.823822 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-internal-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.837104 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-config-data\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.838321 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-public-tls-certs\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.851709 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/693c0cdd-1115-4a68-9f3e-4437ce703788-credential-keys\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.855440 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nscpn\" (UniqueName: \"kubernetes.io/projected/693c0cdd-1115-4a68-9f3e-4437ce703788-kube-api-access-nscpn\") pod \"keystone-559c565ccd-md2lx\" (UID: \"693c0cdd-1115-4a68-9f3e-4437ce703788\") " pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.876495 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7495bf65bd-857k2" podUID="2860dde6-602b-417e-9819-6ce526ed2eb9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Feb 02 09:12:18 crc kubenswrapper[4747]: I0202 09:12:18.884372 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.470712 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-559c565ccd-md2lx"] Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.655929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" event={"ID":"6163cdad-08d0-436e-967b-fa422f3dc4cd","Type":"ContainerStarted","Data":"12baec122d866eb59ec6f7c4a2c06f07157e219fdaeaa777a32cf5f9afebd513"} Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.656340 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.658069 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-559c565ccd-md2lx" event={"ID":"693c0cdd-1115-4a68-9f3e-4437ce703788","Type":"ContainerStarted","Data":"ff6c59afc43b2bdad26a3f02759a347c591beabdd812c43f9807833af6d1d789"} Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.677456 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" podStartSLOduration=9.677442251 podStartE2EDuration="9.677442251s" podCreationTimestamp="2026-02-02 09:12:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:19.673596815 +0000 UTC m=+952.217935248" watchObservedRunningTime="2026-02-02 09:12:19.677442251 +0000 UTC m=+952.221780674" Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.684370 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerStarted","Data":"966f9bf536dc151a5a99ab19ea0d06b7244a411883e71aee805766f439e98c19"} Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.695694 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerStarted","Data":"b4d673c2f38a726f1dbacd3bfff838dd6524d79039863542833197782321a394"} Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.696499 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.696530 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:19 crc kubenswrapper[4747]: I0202 09:12:19.756359 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-ccgcp" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:19 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:19 crc kubenswrapper[4747]: > Feb 02 09:12:20 crc kubenswrapper[4747]: I0202 09:12:20.210951 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:20 crc kubenswrapper[4747]: I0202 09:12:20.518495 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 
09:12:20 crc kubenswrapper[4747]: I0202 09:12:20.518554 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:12:20 crc kubenswrapper[4747]: I0202 09:12:20.719395 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-559c565ccd-md2lx" event={"ID":"693c0cdd-1115-4a68-9f3e-4437ce703788","Type":"ContainerStarted","Data":"b6786f0a624ae4aa6dbe22717e8791daefaebc079329043424d2eb6070fd13fe"} Feb 02 09:12:20 crc kubenswrapper[4747]: I0202 09:12:20.741198 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-559c565ccd-md2lx" podStartSLOduration=2.741177856 podStartE2EDuration="2.741177856s" podCreationTimestamp="2026-02-02 09:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:20.733547015 +0000 UTC m=+953.277885448" watchObservedRunningTime="2026-02-02 09:12:20.741177856 +0000 UTC m=+953.285516289" Feb 02 09:12:21 crc kubenswrapper[4747]: I0202 09:12:21.728837 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:21 crc kubenswrapper[4747]: I0202 09:12:21.817741 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-84b966f6c9-j22q5" podUID="cf03e9ce-2830-49eb-ac1e-cacacd78e942" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: i/o timeout" Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.741169 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerStarted","Data":"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354"} Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.742372 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerStarted","Data":"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974"} Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.745599 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerStarted","Data":"5408a980eb0055401b6491bdce5df77f5df5b3e44524804fcddbedb6ce9b75b8"} Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.745723 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerStarted","Data":"ee0fe79f015ef9137c12a826f435ef739ca34c5a09c1a8ada9a4f78df37dd739"} Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.777191 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-848476d6cc-r4k78" podStartSLOduration=9.231496435 podStartE2EDuration="12.777172904s" podCreationTimestamp="2026-02-02 09:12:10 +0000 UTC" firstStartedPulling="2026-02-02 09:12:18.001509298 +0000 UTC m=+950.545847731" lastFinishedPulling="2026-02-02 09:12:21.547185777 +0000 UTC m=+954.091524200" observedRunningTime="2026-02-02 
09:12:22.759496751 +0000 UTC m=+955.303835204" watchObservedRunningTime="2026-02-02 09:12:22.777172904 +0000 UTC m=+955.321511337" Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.781192 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" podStartSLOduration=9.187633737 podStartE2EDuration="12.781179324s" podCreationTimestamp="2026-02-02 09:12:10 +0000 UTC" firstStartedPulling="2026-02-02 09:12:17.958926462 +0000 UTC m=+950.503264895" lastFinishedPulling="2026-02-02 09:12:21.552472039 +0000 UTC m=+954.096810482" observedRunningTime="2026-02-02 09:12:22.775787569 +0000 UTC m=+955.320126022" watchObservedRunningTime="2026-02-02 09:12:22.781179324 +0000 UTC m=+955.325517747" Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.878970 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-569dcfffb-qv7m8"] Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.880361 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:22 crc kubenswrapper[4747]: I0202 09:12:22.920064 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-569dcfffb-qv7m8"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.010801 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.011397 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-combined-ca-bundle\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.011682 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-logs\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.011755 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data-custom\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.011847 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzhfw\" (UniqueName: \"kubernetes.io/projected/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-kube-api-access-nzhfw\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 
09:12:23.017873 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5bd47ccdc7-8zwk7"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.022076 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.027838 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bd47ccdc7-8zwk7"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.079186 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.079403 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" containerID="cri-o://59b63418f6fcd716296359e6ad3dfc5723e18cb59edc197a0d47e37da7047e06" gracePeriod=30 Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.080270 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" containerID="cri-o://966f9bf536dc151a5a99ab19ea0d06b7244a411883e71aee805766f439e98c19" gracePeriod=30 Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.101641 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:34750->10.217.0.159:9311: read: connection reset by peer" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.101714 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": EOF" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.112374 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6cf448654b-n4qqr"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.113852 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.114842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.114970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-combined-ca-bundle\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.115045 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-logs\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.115073 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data-custom\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.115102 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzhfw\" (UniqueName: \"kubernetes.io/projected/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-kube-api-access-nzhfw\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.116916 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-logs\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.125597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.147593 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-config-data-custom\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.149440 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-combined-ca-bundle\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.160444 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cf448654b-n4qqr"] Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.162561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzhfw\" (UniqueName: \"kubernetes.io/projected/e2cbf327-44a0-4a40-8bf5-ef350dba55b7-kube-api-access-nzhfw\") pod \"barbican-keystone-listener-569dcfffb-qv7m8\" (UID: \"e2cbf327-44a0-4a40-8bf5-ef350dba55b7\") " pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.209618 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.217026 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-public-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.217223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwpcg\" (UniqueName: \"kubernetes.io/projected/8f344eda-fa92-4465-9749-057b27fc8741-kube-api-access-cwpcg\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.217344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.218147 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f344eda-fa92-4465-9749-057b27fc8741-logs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.218281 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-internal-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.219155 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.219229 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data-custom\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.220000 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-combined-ca-bundle\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.220147 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7k46\" (UniqueName: \"kubernetes.io/projected/7ccfda3d-1736-4cb1-a059-3e5508f95148-kube-api-access-s7k46\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.220211 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data-custom\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.220344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ccfda3d-1736-4cb1-a059-3e5508f95148-logs\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.220383 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-combined-ca-bundle\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322329 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322370 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f344eda-fa92-4465-9749-057b27fc8741-logs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322404 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-internal-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " 
pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322426 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322450 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data-custom\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-combined-ca-bundle\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322517 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7k46\" (UniqueName: \"kubernetes.io/projected/7ccfda3d-1736-4cb1-a059-3e5508f95148-kube-api-access-s7k46\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322551 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data-custom\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322574 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ccfda3d-1736-4cb1-a059-3e5508f95148-logs\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322597 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-combined-ca-bundle\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322647 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-public-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.322668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwpcg\" (UniqueName: \"kubernetes.io/projected/8f344eda-fa92-4465-9749-057b27fc8741-kube-api-access-cwpcg\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: 
\"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.323475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f344eda-fa92-4465-9749-057b27fc8741-logs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.329391 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-combined-ca-bundle\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.331609 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.332069 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.332508 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-internal-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.333002 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-combined-ca-bundle\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.334340 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-public-tls-certs\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.334677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ccfda3d-1736-4cb1-a059-3e5508f95148-logs\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.345118 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ccfda3d-1736-4cb1-a059-3e5508f95148-config-data-custom\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc 
kubenswrapper[4747]: I0202 09:12:23.350367 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f344eda-fa92-4465-9749-057b27fc8741-config-data-custom\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.361630 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwpcg\" (UniqueName: \"kubernetes.io/projected/8f344eda-fa92-4465-9749-057b27fc8741-kube-api-access-cwpcg\") pod \"barbican-api-6cf448654b-n4qqr\" (UID: \"8f344eda-fa92-4465-9749-057b27fc8741\") " pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.362906 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7k46\" (UniqueName: \"kubernetes.io/projected/7ccfda3d-1736-4cb1-a059-3e5508f95148-kube-api-access-s7k46\") pod \"barbican-worker-5bd47ccdc7-8zwk7\" (UID: \"7ccfda3d-1736-4cb1-a059-3e5508f95148\") " pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.369446 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.535637 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.767595 4747 generic.go:334] "Generic (PLEG): container finished" podID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerID="59b63418f6fcd716296359e6ad3dfc5723e18cb59edc197a0d47e37da7047e06" exitCode=143 Feb 02 09:12:23 crc kubenswrapper[4747]: I0202 09:12:23.767679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerDied","Data":"59b63418f6fcd716296359e6ad3dfc5723e18cb59edc197a0d47e37da7047e06"} Feb 02 09:12:24 crc kubenswrapper[4747]: I0202 09:12:24.776473 4747 generic.go:334] "Generic (PLEG): container finished" podID="11768358-bd3c-440c-ac71-1c1ad4436571" containerID="f61d25e01ba47aff67e10bfa98efbe79fcc663e4dd181b3cc89ee20e7462b889" exitCode=0 Feb 02 09:12:24 crc kubenswrapper[4747]: I0202 09:12:24.776514 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sh4ft" event={"ID":"11768358-bd3c-440c-ac71-1c1ad4436571","Type":"ContainerDied","Data":"f61d25e01ba47aff67e10bfa98efbe79fcc663e4dd181b3cc89ee20e7462b889"} Feb 02 09:12:25 crc kubenswrapper[4747]: I0202 09:12:25.339751 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:25 crc kubenswrapper[4747]: I0202 09:12:25.368257 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:25 crc kubenswrapper[4747]: I0202 09:12:25.894689 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:25 crc kubenswrapper[4747]: I0202 09:12:25.968597 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:12:25 crc kubenswrapper[4747]: I0202 09:12:25.968899 4747 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="dnsmasq-dns" containerID="cri-o://32ad24a5e0e49c42a7bdec6cefb4a9d070d02099eec449fe668dd627d86be33c" gracePeriod=10 Feb 02 09:12:26 crc kubenswrapper[4747]: I0202 09:12:26.181006 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:26 crc kubenswrapper[4747]: I0202 09:12:26.182268 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:12:26 crc kubenswrapper[4747]: I0202 09:12:26.802496 4747 generic.go:334] "Generic (PLEG): container finished" podID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerID="32ad24a5e0e49c42a7bdec6cefb4a9d070d02099eec449fe668dd627d86be33c" exitCode=0 Feb 02 09:12:26 crc kubenswrapper[4747]: I0202 09:12:26.802580 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" event={"ID":"3ecf0f91-10b8-40bf-8b3f-55917d1002d2","Type":"ContainerDied","Data":"32ad24a5e0e49c42a7bdec6cefb4a9d070d02099eec449fe668dd627d86be33c"} Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.244622 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.251514 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m5674" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:27 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:27 crc kubenswrapper[4747]: > Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.423385 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.423458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv9m4\" (UniqueName: \"kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.423561 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.423611 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.423669 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc 
kubenswrapper[4747]: I0202 09:12:27.423758 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle\") pod \"11768358-bd3c-440c-ac71-1c1ad4436571\" (UID: \"11768358-bd3c-440c-ac71-1c1ad4436571\") " Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.424108 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.424510 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/11768358-bd3c-440c-ac71-1c1ad4436571-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.430901 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4" (OuterVolumeSpecName: "kube-api-access-pv9m4") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "kube-api-access-pv9m4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.434116 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts" (OuterVolumeSpecName: "scripts") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.435738 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.466805 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.504579 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data" (OuterVolumeSpecName: "config-data") pod "11768358-bd3c-440c-ac71-1c1ad4436571" (UID: "11768358-bd3c-440c-ac71-1c1ad4436571"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.528744 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.528785 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.528800 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.528815 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv9m4\" (UniqueName: \"kubernetes.io/projected/11768358-bd3c-440c-ac71-1c1ad4436571-kube-api-access-pv9m4\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.528829 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11768358-bd3c-440c-ac71-1c1ad4436571-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.812913 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sh4ft" event={"ID":"11768358-bd3c-440c-ac71-1c1ad4436571","Type":"ContainerDied","Data":"61a75b27ae044602808956d421dc01e3d1c7bc4707f98b8b04d904617d4db055"} Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.812974 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61a75b27ae044602808956d421dc01e3d1c7bc4707f98b8b04d904617d4db055" Feb 02 09:12:27 crc kubenswrapper[4747]: I0202 09:12:27.813000 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sh4ft" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.096675 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239100 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239592 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239724 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239787 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239862 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.239953 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdn7h\" (UniqueName: \"kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h\") pod \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\" (UID: \"3ecf0f91-10b8-40bf-8b3f-55917d1002d2\") " Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.289000 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h" (OuterVolumeSpecName: "kube-api-access-sdn7h") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "kube-api-access-sdn7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.330462 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.377774 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.381103 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.381131 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdn7h\" (UniqueName: \"kubernetes.io/projected/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-kube-api-access-sdn7h\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.381141 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.393477 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config" (OuterVolumeSpecName: "config") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.482248 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.482716 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.482741 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.488436 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3ecf0f91-10b8-40bf-8b3f-55917d1002d2" (UID: "3ecf0f91-10b8-40bf-8b3f-55917d1002d2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.501994 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:28 crc kubenswrapper[4747]: E0202 09:12:28.502378 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" containerName="cinder-db-sync" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.502390 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" containerName="cinder-db-sync" Feb 02 09:12:28 crc kubenswrapper[4747]: E0202 09:12:28.502404 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="init" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.502410 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="init" Feb 02 09:12:28 crc kubenswrapper[4747]: E0202 09:12:28.502435 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="dnsmasq-dns" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.502441 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="dnsmasq-dns" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.502602 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" containerName="dnsmasq-dns" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.502615 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" containerName="cinder-db-sync" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.503516 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.514096 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:34752->10.217.0.159:9311: read: connection reset by peer" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.514351 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85b5b7d9cb-m78r5" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:34768->10.217.0.159:9311: read: connection reset by peer" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.515097 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-h5hpn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.515263 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.515379 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.515781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.516212 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.527404 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.527863 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.543246 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.604451 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ecf0f91-10b8-40bf-8b3f-55917d1002d2-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.618502 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714381 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714429 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714466 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714491 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714521 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjvm2\" (UniqueName: \"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714548 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714571 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714595 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714621 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714653 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714674 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.714717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4blbx\" (UniqueName: \"kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.756986 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bd47ccdc7-8zwk7"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.795635 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.797567 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.802743 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.833673 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837124 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837146 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837168 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837192 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjvm2\" (UniqueName: \"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837214 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837252 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837278 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837304 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837328 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.837362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4blbx\" (UniqueName: \"kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.838418 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.838919 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.840324 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.840812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.840840 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.852017 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: 
\"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.853780 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.854764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.867803 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.867866 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.888472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.890269 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7495bf65bd-857k2" podUID="2860dde6-602b-417e-9819-6ce526ed2eb9" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.898243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4blbx\" (UniqueName: \"kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx\") pod \"cinder-scheduler-0\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.911679 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjvm2\" (UniqueName: \"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2\") pod \"dnsmasq-dns-5784cf869f-pq9cn\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.914867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" event={"ID":"3ecf0f91-10b8-40bf-8b3f-55917d1002d2","Type":"ContainerDied","Data":"a909b211227c0631d2b8c5906b1232d331d601054983be16ecf21b8c12ce42b6"} Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.914910 4747 scope.go:117] "RemoveContainer" containerID="32ad24a5e0e49c42a7bdec6cefb4a9d070d02099eec449fe668dd627d86be33c" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.915087 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-ft6lc" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.921073 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939054 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939113 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939212 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939294 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.939311 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxpks\" (UniqueName: \"kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.949765 4747 generic.go:334] "Generic (PLEG): container finished" podID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerID="966f9bf536dc151a5a99ab19ea0d06b7244a411883e71aee805766f439e98c19" exitCode=0 Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.949808 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerDied","Data":"966f9bf536dc151a5a99ab19ea0d06b7244a411883e71aee805766f439e98c19"} Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.979869 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.986537 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.986829 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:28 crc kubenswrapper[4747]: I0202 09:12:28.999525 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-ft6lc"] Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040451 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040517 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040584 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040620 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040661 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.040757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxpks\" (UniqueName: \"kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.041009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.041598 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.045262 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.045678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.046304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.047619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.063700 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxpks\" (UniqueName: \"kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks\") pod \"cinder-api-0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.095062 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.195698 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.731649 4747 scope.go:117] "RemoveContainer" containerID="2f5dec28f6909a79ae567f2a054c882000337229cd0e10c6b805722e9f2fca2a" Feb 02 09:12:29 crc kubenswrapper[4747]: I0202 09:12:29.995811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" event={"ID":"7ccfda3d-1736-4cb1-a059-3e5508f95148","Type":"ContainerStarted","Data":"2f44220384cba207a1015eccbaaaf30b96c37c0e1099e3cab033104d33a55736"} Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.153414 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.239013 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cf448654b-n4qqr"] Feb 02 09:12:30 crc kubenswrapper[4747]: E0202 09:12:30.258232 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.266504 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data\") pod \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.266584 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom\") pod \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.266685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle\") pod \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.266755 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs\") pod \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.266781 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcwp7\" (UniqueName: \"kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7\") pod \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\" (UID: \"54c1d27f-e96c-425c-8b4c-08e7824abbf1\") " Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.270320 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs" (OuterVolumeSpecName: "logs") pod "54c1d27f-e96c-425c-8b4c-08e7824abbf1" (UID: "54c1d27f-e96c-425c-8b4c-08e7824abbf1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.281062 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "54c1d27f-e96c-425c-8b4c-08e7824abbf1" (UID: "54c1d27f-e96c-425c-8b4c-08e7824abbf1"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.281200 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7" (OuterVolumeSpecName: "kube-api-access-pcwp7") pod "54c1d27f-e96c-425c-8b4c-08e7824abbf1" (UID: "54c1d27f-e96c-425c-8b4c-08e7824abbf1"). InnerVolumeSpecName "kube-api-access-pcwp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.357627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54c1d27f-e96c-425c-8b4c-08e7824abbf1" (UID: "54c1d27f-e96c-425c-8b4c-08e7824abbf1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.368420 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.368793 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.368803 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54c1d27f-e96c-425c-8b4c-08e7824abbf1-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.368813 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcwp7\" (UniqueName: \"kubernetes.io/projected/54c1d27f-e96c-425c-8b4c-08e7824abbf1-kube-api-access-pcwp7\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.406662 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ecf0f91-10b8-40bf-8b3f-55917d1002d2" path="/var/lib/kubelet/pods/3ecf0f91-10b8-40bf-8b3f-55917d1002d2/volumes" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.410689 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data" (OuterVolumeSpecName: "config-data") pod "54c1d27f-e96c-425c-8b4c-08e7824abbf1" (UID: "54c1d27f-e96c-425c-8b4c-08e7824abbf1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.471002 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54c1d27f-e96c-425c-8b4c-08e7824abbf1-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.768124 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-569dcfffb-qv7m8"] Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.791447 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.809392 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.856092 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:30 crc kubenswrapper[4747]: I0202 09:12:30.951568 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.020510 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" event={"ID":"7615847e-cebb-4f8a-a453-7ae866963464","Type":"ContainerStarted","Data":"e88c40de8be7f9cc4247b640786b8a71078ad958c852db929460d35bd5a1d5c9"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.026508 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85b5b7d9cb-m78r5" event={"ID":"54c1d27f-e96c-425c-8b4c-08e7824abbf1","Type":"ContainerDied","Data":"f7656bc1134d60a2b3d2b88a56168b57e0385aa5cd77c9b18a920603c06bddd9"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.026561 4747 scope.go:117] "RemoveContainer" containerID="966f9bf536dc151a5a99ab19ea0d06b7244a411883e71aee805766f439e98c19" Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.026712 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-85b5b7d9cb-m78r5" Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.036407 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" event={"ID":"e2cbf327-44a0-4a40-8bf5-ef350dba55b7","Type":"ContainerStarted","Data":"f98af4dae7a446a701362f7cb9c8d2721cf3f5a7641f7d6717a4907118f00cef"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.046193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerStarted","Data":"1c50b690eddbfc174d099a947fdb63a610f24ef8a4ec2a6e80922b373d66f656"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.062803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerStarted","Data":"80ac2be38105a798aa593487588617d0777b3764849898a16cada88b6657d438"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.063033 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="ceilometer-notification-agent" containerID="cri-o://19f562aec086b19976fada025d9bf69ae03fcca6e3f2d994e785fbf3c3ce5db6" gracePeriod=30 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.063090 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.063120 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="sg-core" containerID="cri-o://1cbc64e8bd6a26ba6a4e2e9a50228b0b3f2f17f44e711b4dc13457c761349fc4" gracePeriod=30 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.063135 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="proxy-httpd" containerID="cri-o://80ac2be38105a798aa593487588617d0777b3764849898a16cada88b6657d438" gracePeriod=30 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.077977 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerStarted","Data":"44d2d88f2481d261fe693b22669665dd90b30b4eed57f36a00df301cd1f962de"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.080163 4747 scope.go:117] "RemoveContainer" containerID="59b63418f6fcd716296359e6ad3dfc5723e18cb59edc197a0d47e37da7047e06" Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.082392 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" event={"ID":"7ccfda3d-1736-4cb1-a059-3e5508f95148","Type":"ContainerStarted","Data":"8dbe1747e2c64748c2f779fc97ccf0f1ddd50c09c89e7b62bc9a974af7e3f8a9"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.082417 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" event={"ID":"7ccfda3d-1736-4cb1-a059-3e5508f95148","Type":"ContainerStarted","Data":"359e23ba79b0bf20edf280c2566a0bb55c89ad3e7cb60797de102f607118d7d3"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.102618 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.114415 
4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cf448654b-n4qqr" event={"ID":"8f344eda-fa92-4465-9749-057b27fc8741","Type":"ContainerStarted","Data":"af35beea1c7208ef73fab5cda35d087f9094c53990034a64e36fdb99bf8f70d1"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.114488 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cf448654b-n4qqr" event={"ID":"8f344eda-fa92-4465-9749-057b27fc8741","Type":"ContainerStarted","Data":"7e9770f234677c4b5ff92773b9695a2dc4c6b03bd7305b3e63ae9c7e81691df1"} Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.114546 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ccgcp" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" containerID="cri-o://a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98" gracePeriod=2 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.126321 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-85b5b7d9cb-m78r5"] Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.182974 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5bd47ccdc7-8zwk7" podStartSLOduration=9.182951532 podStartE2EDuration="9.182951532s" podCreationTimestamp="2026-02-02 09:12:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:31.114528119 +0000 UTC m=+963.658866552" watchObservedRunningTime="2026-02-02 09:12:31.182951532 +0000 UTC m=+963.727289965" Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.206248 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.206511 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-848476d6cc-r4k78" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker-log" containerID="cri-o://eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974" gracePeriod=30 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.206962 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-848476d6cc-r4k78" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker" containerID="cri-o://b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354" gracePeriod=30 Feb 02 09:12:31 crc kubenswrapper[4747]: I0202 09:12:31.958551 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.085468 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.145796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content\") pod \"07972a25-4956-4f3d-b9be-0c555b1906df\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.145872 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kwtb\" (UniqueName: \"kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb\") pod \"07972a25-4956-4f3d-b9be-0c555b1906df\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.145900 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities\") pod \"07972a25-4956-4f3d-b9be-0c555b1906df\" (UID: \"07972a25-4956-4f3d-b9be-0c555b1906df\") " Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.146897 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities" (OuterVolumeSpecName: "utilities") pod "07972a25-4956-4f3d-b9be-0c555b1906df" (UID: "07972a25-4956-4f3d-b9be-0c555b1906df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.161487 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb" (OuterVolumeSpecName: "kube-api-access-9kwtb") pod "07972a25-4956-4f3d-b9be-0c555b1906df" (UID: "07972a25-4956-4f3d-b9be-0c555b1906df"). InnerVolumeSpecName "kube-api-access-9kwtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.182174 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07972a25-4956-4f3d-b9be-0c555b1906df" (UID: "07972a25-4956-4f3d-b9be-0c555b1906df"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.193586 4747 generic.go:334] "Generic (PLEG): container finished" podID="07972a25-4956-4f3d-b9be-0c555b1906df" containerID="a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98" exitCode=0 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.193675 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerDied","Data":"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.193700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ccgcp" event={"ID":"07972a25-4956-4f3d-b9be-0c555b1906df","Type":"ContainerDied","Data":"ad1f442fc414e7a5f9cae8d5279d55098303585581bba8973173c99a72f3ecab"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.193720 4747 scope.go:117] "RemoveContainer" containerID="a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.193847 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ccgcp" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.211664 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.212373 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7b4dbd8489-wjfdd" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-api" containerID="cri-o://4b1c422fc70e0e5f80cae252a22d70243036db3e120b0f9d689ee56ea9c54126" gracePeriod=30 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.212977 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7b4dbd8489-wjfdd" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" containerID="cri-o://640cd96beda6a2402b48a48c7eba3997597527536cc9ad3d969289503765fbd3" gracePeriod=30 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.247810 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.247853 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kwtb\" (UniqueName: \"kubernetes.io/projected/07972a25-4956-4f3d-b9be-0c555b1906df-kube-api-access-9kwtb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.247870 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07972a25-4956-4f3d-b9be-0c555b1906df-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.263649 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" event={"ID":"e2cbf327-44a0-4a40-8bf5-ef350dba55b7","Type":"ContainerStarted","Data":"d4536d9aabc6b1a8d456715f8e0ae64694a74d32247138b3f23a28d4fbf14bcf"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.263699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" 
event={"ID":"e2cbf327-44a0-4a40-8bf5-ef350dba55b7","Type":"ContainerStarted","Data":"f8dd0e43cbc72fb1f808545940f2dedf9343e196b1883aeac208a38718a54e5c"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323052 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7fb8f585c-hlmn8"] Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.323466 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="extract-utilities" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323479 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="extract-utilities" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.323489 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="extract-content" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323497 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="extract-content" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.323522 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323531 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.323541 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323548 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.323566 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.323575 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.324065 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" containerName="registry-server" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.324092 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.324102 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" containerName="barbican-api-log" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.324282 4747 generic.go:334] "Generic (PLEG): container finished" podID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerID="1cbc64e8bd6a26ba6a4e2e9a50228b0b3f2f17f44e711b4dc13457c761349fc4" exitCode=2 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.327098 4747 generic.go:334] "Generic (PLEG): container finished" podID="16f5c15b-20ec-4f65-b557-89867813005a" containerID="eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974" exitCode=143 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.335827 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.336117 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7b4dbd8489-wjfdd" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": read tcp 10.217.0.2:34034->10.217.0.153:9696: read: connection reset by peer" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.335653 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerDied","Data":"1cbc64e8bd6a26ba6a4e2e9a50228b0b3f2f17f44e711b4dc13457c761349fc4"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.336801 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerDied","Data":"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.390613 4747 scope.go:117] "RemoveContainer" containerID="a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.440994 4747 generic.go:334] "Generic (PLEG): container finished" podID="7615847e-cebb-4f8a-a453-7ae866963464" containerID="25d0928c2ccecefa42e3a7bebc3783b98e3ab7ded40aa37b406d59c26bd44309" exitCode=0 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.456325 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-public-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.456718 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-combined-ca-bundle\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.457867 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-ovndb-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.458440 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.459139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-httpd-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.459628 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqpvc\" (UniqueName: \"kubernetes.io/projected/884e89eb-4a79-4086-8e3c-bc521ff7db35-kube-api-access-mqpvc\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.459822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-internal-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.468706 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c1d27f-e96c-425c-8b4c-08e7824abbf1" path="/var/lib/kubelet/pods/54c1d27f-e96c-425c-8b4c-08e7824abbf1/volumes" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.469497 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.469540 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.469558 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cf448654b-n4qqr" event={"ID":"8f344eda-fa92-4465-9749-057b27fc8741","Type":"ContainerStarted","Data":"270e4a2a6dff4f6bc897e11c7d209c76bdadcfdfd5b01f4264c3210fffa82036"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.469579 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" event={"ID":"7615847e-cebb-4f8a-a453-7ae866963464","Type":"ContainerDied","Data":"25d0928c2ccecefa42e3a7bebc3783b98e3ab7ded40aa37b406d59c26bd44309"} Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.488446 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fb8f585c-hlmn8"] Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.499084 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-569dcfffb-qv7m8" podStartSLOduration=10.499063325 podStartE2EDuration="10.499063325s" podCreationTimestamp="2026-02-02 09:12:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:32.293779945 +0000 UTC m=+964.838118388" watchObservedRunningTime="2026-02-02 09:12:32.499063325 +0000 UTC m=+965.043401758" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.503976 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07972a25_4956_4f3d_b9be_0c555b1906df.slice/crio-ad1f442fc414e7a5f9cae8d5279d55098303585581bba8973173c99a72f3ecab\": RecentStats: unable to find data in memory cache]" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.518991 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.519254 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" 
containerName="barbican-keystone-listener-log" containerID="cri-o://ee0fe79f015ef9137c12a826f435ef739ca34c5a09c1a8ada9a4f78df37dd739" gracePeriod=30 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.519857 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener" containerID="cri-o://5408a980eb0055401b6491bdce5df77f5df5b3e44524804fcddbedb6ce9b75b8" gracePeriod=30 Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.563027 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6cf448654b-n4qqr" podStartSLOduration=9.563001416 podStartE2EDuration="9.563001416s" podCreationTimestamp="2026-02-02 09:12:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:32.431416331 +0000 UTC m=+964.975754764" watchObservedRunningTime="2026-02-02 09:12:32.563001416 +0000 UTC m=+965.107339849" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.564184 4747 scope.go:117] "RemoveContainer" containerID="173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566113 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566158 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-httpd-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566236 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqpvc\" (UniqueName: \"kubernetes.io/projected/884e89eb-4a79-4086-8e3c-bc521ff7db35-kube-api-access-mqpvc\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566290 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-internal-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566379 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-public-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566427 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-combined-ca-bundle\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " 
pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.566511 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-ovndb-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.572072 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-httpd-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.572371 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-public-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.574675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-config\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.575722 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-combined-ca-bundle\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.576744 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-ovndb-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.577783 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/884e89eb-4a79-4086-8e3c-bc521ff7db35-internal-tls-certs\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.591003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqpvc\" (UniqueName: \"kubernetes.io/projected/884e89eb-4a79-4086-8e3c-bc521ff7db35-kube-api-access-mqpvc\") pod \"neutron-7fb8f585c-hlmn8\" (UID: \"884e89eb-4a79-4086-8e3c-bc521ff7db35\") " pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.595100 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.602105 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ccgcp"] Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.617146 4747 scope.go:117] "RemoveContainer" containerID="a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98" Feb 02 09:12:32 
crc kubenswrapper[4747]: E0202 09:12:32.621278 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98\": container with ID starting with a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98 not found: ID does not exist" containerID="a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.621319 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98"} err="failed to get container status \"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98\": rpc error: code = NotFound desc = could not find container \"a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98\": container with ID starting with a1e3e0c409b9da098d79e11af7013e757b0540504eeecf00bd8ff547e286fc98 not found: ID does not exist" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.621343 4747 scope.go:117] "RemoveContainer" containerID="a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.623292 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1\": container with ID starting with a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1 not found: ID does not exist" containerID="a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.623365 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1"} err="failed to get container status \"a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1\": rpc error: code = NotFound desc = could not find container \"a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1\": container with ID starting with a605569d0bca7516c06f25c607487d2848edeef4117f0c50ce12e0b4ed9f86b1 not found: ID does not exist" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.623398 4747 scope.go:117] "RemoveContainer" containerID="173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1" Feb 02 09:12:32 crc kubenswrapper[4747]: E0202 09:12:32.624681 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1\": container with ID starting with 173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1 not found: ID does not exist" containerID="173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1" Feb 02 09:12:32 crc kubenswrapper[4747]: I0202 09:12:32.624713 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1"} err="failed to get container status \"173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1\": rpc error: code = NotFound desc = could not find container \"173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1\": container with ID starting with 173722ef33acbb7e124a695ffae51bfce1344c89fca4da1248db820fa1c01cf1 not found: ID does not exist" Feb 02 09:12:32 crc 
kubenswrapper[4747]: I0202 09:12:32.808578 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.124798 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.177537 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs\") pod \"16f5c15b-20ec-4f65-b557-89867813005a\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.177611 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data\") pod \"16f5c15b-20ec-4f65-b557-89867813005a\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.177654 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p72r8\" (UniqueName: \"kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8\") pod \"16f5c15b-20ec-4f65-b557-89867813005a\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.177701 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom\") pod \"16f5c15b-20ec-4f65-b557-89867813005a\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.177736 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle\") pod \"16f5c15b-20ec-4f65-b557-89867813005a\" (UID: \"16f5c15b-20ec-4f65-b557-89867813005a\") " Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.178923 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs" (OuterVolumeSpecName: "logs") pod "16f5c15b-20ec-4f65-b557-89867813005a" (UID: "16f5c15b-20ec-4f65-b557-89867813005a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.193611 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "16f5c15b-20ec-4f65-b557-89867813005a" (UID: "16f5c15b-20ec-4f65-b557-89867813005a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.205827 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8" (OuterVolumeSpecName: "kube-api-access-p72r8") pod "16f5c15b-20ec-4f65-b557-89867813005a" (UID: "16f5c15b-20ec-4f65-b557-89867813005a"). InnerVolumeSpecName "kube-api-access-p72r8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.257051 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16f5c15b-20ec-4f65-b557-89867813005a" (UID: "16f5c15b-20ec-4f65-b557-89867813005a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.283406 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.283450 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16f5c15b-20ec-4f65-b557-89867813005a-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.283464 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p72r8\" (UniqueName: \"kubernetes.io/projected/16f5c15b-20ec-4f65-b557-89867813005a-kube-api-access-p72r8\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.283479 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.318233 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data" (OuterVolumeSpecName: "config-data") pod "16f5c15b-20ec-4f65-b557-89867813005a" (UID: "16f5c15b-20ec-4f65-b557-89867813005a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.389280 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16f5c15b-20ec-4f65-b557-89867813005a-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.464720 4747 generic.go:334] "Generic (PLEG): container finished" podID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerID="ee0fe79f015ef9137c12a826f435ef739ca34c5a09c1a8ada9a4f78df37dd739" exitCode=143 Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.464807 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerDied","Data":"ee0fe79f015ef9137c12a826f435ef739ca34c5a09c1a8ada9a4f78df37dd739"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.466911 4747 generic.go:334] "Generic (PLEG): container finished" podID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerID="640cd96beda6a2402b48a48c7eba3997597527536cc9ad3d969289503765fbd3" exitCode=0 Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.467017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerDied","Data":"640cd96beda6a2402b48a48c7eba3997597527536cc9ad3d969289503765fbd3"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.468850 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerStarted","Data":"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.471123 4747 generic.go:334] "Generic (PLEG): container finished" podID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerID="80ac2be38105a798aa593487588617d0777b3764849898a16cada88b6657d438" exitCode=0 Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.471234 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerDied","Data":"80ac2be38105a798aa593487588617d0777b3764849898a16cada88b6657d438"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.477388 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerStarted","Data":"d54509da86677af39b62966a4fd76e5b22515bf4e726a95cebba7762f692c5b8"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.481841 4747 generic.go:334] "Generic (PLEG): container finished" podID="16f5c15b-20ec-4f65-b557-89867813005a" containerID="b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354" exitCode=0 Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.481917 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerDied","Data":"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.481970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-848476d6cc-r4k78" event={"ID":"16f5c15b-20ec-4f65-b557-89867813005a","Type":"ContainerDied","Data":"751d15a51319006d382fbe77378ad0ff0ca08f622546037bc3a4decab8efaed4"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 
09:12:33.481993 4747 scope.go:117] "RemoveContainer" containerID="b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.482122 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-848476d6cc-r4k78" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.495265 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" event={"ID":"7615847e-cebb-4f8a-a453-7ae866963464","Type":"ContainerStarted","Data":"d8df1ae2a13c094d440863515caa194ba0e3d04a95db19c5adb8dd64e26b683b"} Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.496004 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.504576 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7fb8f585c-hlmn8"] Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.523905 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" podStartSLOduration=5.523884795 podStartE2EDuration="5.523884795s" podCreationTimestamp="2026-02-02 09:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:33.523491885 +0000 UTC m=+966.067830318" watchObservedRunningTime="2026-02-02 09:12:33.523884795 +0000 UTC m=+966.068223228" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.530422 4747 scope.go:117] "RemoveContainer" containerID="eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.565971 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.578804 4747 scope.go:117] "RemoveContainer" containerID="b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354" Feb 02 09:12:33 crc kubenswrapper[4747]: E0202 09:12:33.580348 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354\": container with ID starting with b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354 not found: ID does not exist" containerID="b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.580377 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354"} err="failed to get container status \"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354\": rpc error: code = NotFound desc = could not find container \"b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354\": container with ID starting with b872844989c106d769590d3df6ef71c0e70b6430b8d166f1bf14172fde088354 not found: ID does not exist" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.580398 4747 scope.go:117] "RemoveContainer" containerID="eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974" Feb 02 09:12:33 crc kubenswrapper[4747]: E0202 09:12:33.587686 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974\": container with ID starting with eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974 not found: ID does not exist" containerID="eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.587729 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974"} err="failed to get container status \"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974\": rpc error: code = NotFound desc = could not find container \"eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974\": container with ID starting with eba84d69b6a6be064800cb137f4dedb70f8c52b664047eb4be318941e9671974 not found: ID does not exist" Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.606549 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-848476d6cc-r4k78"] Feb 02 09:12:33 crc kubenswrapper[4747]: I0202 09:12:33.984070 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7b4dbd8489-wjfdd" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": dial tcp 10.217.0.153:9696: connect: connection refused" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.363885 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07972a25-4956-4f3d-b9be-0c555b1906df" path="/var/lib/kubelet/pods/07972a25-4956-4f3d-b9be-0c555b1906df/volumes" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.364746 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16f5c15b-20ec-4f65-b557-89867813005a" path="/var/lib/kubelet/pods/16f5c15b-20ec-4f65-b557-89867813005a/volumes" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.504953 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8f585c-hlmn8" event={"ID":"884e89eb-4a79-4086-8e3c-bc521ff7db35","Type":"ContainerStarted","Data":"8f3f95c58b9f8a7cce4612d0535e4106664a2022a267d7b7726ec37e9c7c7c7f"} Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.505219 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8f585c-hlmn8" event={"ID":"884e89eb-4a79-4086-8e3c-bc521ff7db35","Type":"ContainerStarted","Data":"68c66abb7b15abe82f417d4cd6e48111c7a1b5fb8eaf26d82cedb7e22119b042"} Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.505229 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7fb8f585c-hlmn8" event={"ID":"884e89eb-4a79-4086-8e3c-bc521ff7db35","Type":"ContainerStarted","Data":"5f8283b45362aebf0ae6f560066f0ffa299a28451e247d7fab9ff2bf00711abe"} Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.505622 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.507367 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerStarted","Data":"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e"} Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.507424 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api-log" 
containerID="cri-o://155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" gracePeriod=30 Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.507444 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api" containerID="cri-o://18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" gracePeriod=30 Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.507466 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.516027 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerStarted","Data":"65328239a3adb669e1bb8b09edfcc8c219f5cb8f315b741b7000609e9d908dd0"} Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.530847 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7fb8f585c-hlmn8" podStartSLOduration=2.530827688 podStartE2EDuration="2.530827688s" podCreationTimestamp="2026-02-02 09:12:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:34.524267593 +0000 UTC m=+967.068606026" watchObservedRunningTime="2026-02-02 09:12:34.530827688 +0000 UTC m=+967.075166121" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.557428 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.27601625 podStartE2EDuration="6.557408053s" podCreationTimestamp="2026-02-02 09:12:28 +0000 UTC" firstStartedPulling="2026-02-02 09:12:30.860118179 +0000 UTC m=+963.404456612" lastFinishedPulling="2026-02-02 09:12:32.141509982 +0000 UTC m=+964.685848415" observedRunningTime="2026-02-02 09:12:34.5524759 +0000 UTC m=+967.096814343" watchObservedRunningTime="2026-02-02 09:12:34.557408053 +0000 UTC m=+967.101746486" Feb 02 09:12:34 crc kubenswrapper[4747]: I0202 09:12:34.578533 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.578513562 podStartE2EDuration="6.578513562s" podCreationTimestamp="2026-02-02 09:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:34.578337827 +0000 UTC m=+967.122676260" watchObservedRunningTime="2026-02-02 09:12:34.578513562 +0000 UTC m=+967.122852005" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.173000 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236173 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236238 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxpks\" (UniqueName: \"kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236295 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236353 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236440 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236486 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.236598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom\") pod \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\" (UID: \"03cfbc6d-1ce4-41fa-8050-28462f48a2a0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.237058 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs" (OuterVolumeSpecName: "logs") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.237518 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.237541 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.243817 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.245065 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts" (OuterVolumeSpecName: "scripts") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.263032 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks" (OuterVolumeSpecName: "kube-api-access-rxpks") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "kube-api-access-rxpks". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.270270 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.285988 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data" (OuterVolumeSpecName: "config-data") pod "03cfbc6d-1ce4-41fa-8050-28462f48a2a0" (UID: "03cfbc6d-1ce4-41fa-8050-28462f48a2a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.342703 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.342773 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxpks\" (UniqueName: \"kubernetes.io/projected/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-kube-api-access-rxpks\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.342785 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.342794 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.342802 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cfbc6d-1ce4-41fa-8050-28462f48a2a0-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526584 4747 generic.go:334] "Generic (PLEG): container finished" podID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerID="18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" exitCode=0 Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526833 4747 generic.go:334] "Generic (PLEG): container finished" podID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerID="155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" exitCode=143 Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526634 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerDied","Data":"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e"} Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526668 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526895 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerDied","Data":"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964"} Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526919 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"03cfbc6d-1ce4-41fa-8050-28462f48a2a0","Type":"ContainerDied","Data":"1c50b690eddbfc174d099a947fdb63a610f24ef8a4ec2a6e80922b373d66f656"} Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.526947 4747 scope.go:117] "RemoveContainer" containerID="18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.534846 4747 generic.go:334] "Generic (PLEG): container finished" podID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerID="19f562aec086b19976fada025d9bf69ae03fcca6e3f2d994e785fbf3c3ce5db6" exitCode=0 Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.534928 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerDied","Data":"19f562aec086b19976fada025d9bf69ae03fcca6e3f2d994e785fbf3c3ce5db6"} Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.559100 4747 scope.go:117] "RemoveContainer" containerID="155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.569082 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.584489 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.591975 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595238 4747 scope.go:117] "RemoveContainer" containerID="18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.595267 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api-log" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595301 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api-log" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.595320 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker-log" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595329 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker-log" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.595354 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595362 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.595390 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16f5c15b-20ec-4f65-b557-89867813005a" 
containerName="barbican-worker" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595399 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595631 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595646 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595654 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="16f5c15b-20ec-4f65-b557-89867813005a" containerName="barbican-worker-log" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.595665 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" containerName="cinder-api-log" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.596587 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.596983 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e\": container with ID starting with 18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e not found: ID does not exist" containerID="18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.597031 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e"} err="failed to get container status \"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e\": rpc error: code = NotFound desc = could not find container \"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e\": container with ID starting with 18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e not found: ID does not exist" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.597066 4747 scope.go:117] "RemoveContainer" containerID="155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" Feb 02 09:12:35 crc kubenswrapper[4747]: E0202 09:12:35.597405 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964\": container with ID starting with 155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964 not found: ID does not exist" containerID="155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.597432 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964"} err="failed to get container status \"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964\": rpc error: code = NotFound desc = could not find container \"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964\": container with ID starting with 155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964 not found: ID does not exist" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 
09:12:35.597454 4747 scope.go:117] "RemoveContainer" containerID="18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.606582 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.607119 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e"} err="failed to get container status \"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e\": rpc error: code = NotFound desc = could not find container \"18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e\": container with ID starting with 18045020a2254a3705af19351e5defd4eebf6f1228e229d45b20b75b1617b28e not found: ID does not exist" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.607265 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.607372 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.608760 4747 scope.go:117] "RemoveContainer" containerID="155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.613132 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964"} err="failed to get container status \"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964\": rpc error: code = NotFound desc = could not find container \"155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964\": container with ID starting with 155401234e847f64fb70b77cb375d77cfc3c9430663c1ef7498cdb1315d7e964 not found: ID does not exist" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.622584 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.668902 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669040 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669062 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669133 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/28634e0e-24a0-41f6-84ad-ffea557b14ed-etc-machine-id\") pod 
\"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669208 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-scripts\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28634e0e-24a0-41f6-84ad-ffea557b14ed-logs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.669322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mlmm\" (UniqueName: \"kubernetes.io/projected/28634e0e-24a0-41f6-84ad-ffea557b14ed-kube-api-access-8mlmm\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.770869 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-scripts\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.770917 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.770975 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28634e0e-24a0-41f6-84ad-ffea557b14ed-logs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771009 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mlmm\" (UniqueName: \"kubernetes.io/projected/28634e0e-24a0-41f6-84ad-ffea557b14ed-kube-api-access-8mlmm\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771039 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771063 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771084 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/28634e0e-24a0-41f6-84ad-ffea557b14ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.771161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.774865 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28634e0e-24a0-41f6-84ad-ffea557b14ed-logs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.775667 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/28634e0e-24a0-41f6-84ad-ffea557b14ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.779726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.794929 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.795477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.796386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.797366 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-scripts\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.801256 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/28634e0e-24a0-41f6-84ad-ffea557b14ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.812477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mlmm\" (UniqueName: \"kubernetes.io/projected/28634e0e-24a0-41f6-84ad-ffea557b14ed-kube-api-access-8mlmm\") pod \"cinder-api-0\" (UID: \"28634e0e-24a0-41f6-84ad-ffea557b14ed\") " pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.845895 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.950821 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.973840 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.973915 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.973984 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974081 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974181 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974232 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pth77\" (UniqueName: 
\"kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974262 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data\") pod \"0df43a11-4442-4371-bdb3-b49610cefdc0\" (UID: \"0df43a11-4442-4371-bdb3-b49610cefdc0\") " Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974757 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.974785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.977321 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts" (OuterVolumeSpecName: "scripts") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:35 crc kubenswrapper[4747]: I0202 09:12:35.977877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77" (OuterVolumeSpecName: "kube-api-access-pth77") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "kube-api-access-pth77". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.004848 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.027564 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.059645 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data" (OuterVolumeSpecName: "config-data") pod "0df43a11-4442-4371-bdb3-b49610cefdc0" (UID: "0df43a11-4442-4371-bdb3-b49610cefdc0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076250 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076282 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076295 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076307 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076319 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0df43a11-4442-4371-bdb3-b49610cefdc0-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076333 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pth77\" (UniqueName: \"kubernetes.io/projected/0df43a11-4442-4371-bdb3-b49610cefdc0-kube-api-access-pth77\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.076345 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0df43a11-4442-4371-bdb3-b49610cefdc0-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.357172 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03cfbc6d-1ce4-41fa-8050-28462f48a2a0" path="/var/lib/kubelet/pods/03cfbc6d-1ce4-41fa-8050-28462f48a2a0/volumes" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.398277 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 09:12:36 crc kubenswrapper[4747]: W0202 09:12:36.401292 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28634e0e_24a0_41f6_84ad_ffea557b14ed.slice/crio-eaec55eb6c4853e0ca8a9e52f9641c00a6d1161a2f31416873f46ecef65eeca1 WatchSource:0}: Error finding container eaec55eb6c4853e0ca8a9e52f9641c00a6d1161a2f31416873f46ecef65eeca1: Status 404 returned error can't find the container with id eaec55eb6c4853e0ca8a9e52f9641c00a6d1161a2f31416873f46ecef65eeca1 Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.557889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0df43a11-4442-4371-bdb3-b49610cefdc0","Type":"ContainerDied","Data":"c6bd4d79cc78ed2fc1542f968612c721f5dacb33c3e4faa711d500bc092ec38d"} Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.557989 4747 scope.go:117] "RemoveContainer" containerID="80ac2be38105a798aa593487588617d0777b3764849898a16cada88b6657d438" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.558018 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.562131 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"28634e0e-24a0-41f6-84ad-ffea557b14ed","Type":"ContainerStarted","Data":"eaec55eb6c4853e0ca8a9e52f9641c00a6d1161a2f31416873f46ecef65eeca1"} Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.616589 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.625148 4747 scope.go:117] "RemoveContainer" containerID="1cbc64e8bd6a26ba6a4e2e9a50228b0b3f2f17f44e711b4dc13457c761349fc4" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.633987 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646004 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:36 crc kubenswrapper[4747]: E0202 09:12:36.646499 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="proxy-httpd" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646526 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="proxy-httpd" Feb 02 09:12:36 crc kubenswrapper[4747]: E0202 09:12:36.646551 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="sg-core" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646559 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="sg-core" Feb 02 09:12:36 crc kubenswrapper[4747]: E0202 09:12:36.646587 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="ceilometer-notification-agent" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646596 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="ceilometer-notification-agent" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646840 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="ceilometer-notification-agent" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646862 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="sg-core" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.646877 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" containerName="proxy-httpd" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.648766 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.650485 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.651288 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.662272 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.673829 4747 scope.go:117] "RemoveContainer" containerID="19f562aec086b19976fada025d9bf69ae03fcca6e3f2d994e785fbf3c3ce5db6" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.692894 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.692966 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.693039 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.693117 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.693201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.693453 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.693518 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tngt6\" (UniqueName: \"kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795683 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795745 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795796 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795858 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.795988 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.796021 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tngt6\" (UniqueName: \"kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.796871 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.797260 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.801784 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.802190 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.808991 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.810475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.817066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tngt6\" (UniqueName: \"kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6\") pod \"ceilometer-0\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " pod="openstack/ceilometer-0" Feb 02 09:12:36 crc kubenswrapper[4747]: I0202 09:12:36.974542 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.232499 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m5674" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:37 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:37 crc kubenswrapper[4747]: > Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.447636 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.580318 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerStarted","Data":"c25877cd6a58093d2ffbe6070c41b602ef27978251cc2f6d48cceaaed8b60a07"} Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.582375 4747 generic.go:334] "Generic (PLEG): container finished" podID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerID="5408a980eb0055401b6491bdce5df77f5df5b3e44524804fcddbedb6ce9b75b8" exitCode=0 Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.582433 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerDied","Data":"5408a980eb0055401b6491bdce5df77f5df5b3e44524804fcddbedb6ce9b75b8"} Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.584439 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"28634e0e-24a0-41f6-84ad-ffea557b14ed","Type":"ContainerStarted","Data":"dee761d9af4ffd671becf29ec3b4170c98bd1fa2455b9d8e83ba02f03628ede5"} Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.757009 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.818221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data\") pod \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.818283 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle\") pod \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.818399 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxfpz\" (UniqueName: \"kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz\") pod \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.818466 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs\") pod \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.818509 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom\") pod \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\" (UID: \"68e47ed3-c6ed-453b-a1eb-3fd9073c519c\") " Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.819136 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs" (OuterVolumeSpecName: "logs") pod "68e47ed3-c6ed-453b-a1eb-3fd9073c519c" (UID: "68e47ed3-c6ed-453b-a1eb-3fd9073c519c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.825965 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "68e47ed3-c6ed-453b-a1eb-3fd9073c519c" (UID: "68e47ed3-c6ed-453b-a1eb-3fd9073c519c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.829332 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz" (OuterVolumeSpecName: "kube-api-access-jxfpz") pod "68e47ed3-c6ed-453b-a1eb-3fd9073c519c" (UID: "68e47ed3-c6ed-453b-a1eb-3fd9073c519c"). InnerVolumeSpecName "kube-api-access-jxfpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.848582 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68e47ed3-c6ed-453b-a1eb-3fd9073c519c" (UID: "68e47ed3-c6ed-453b-a1eb-3fd9073c519c"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.875118 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data" (OuterVolumeSpecName: "config-data") pod "68e47ed3-c6ed-453b-a1eb-3fd9073c519c" (UID: "68e47ed3-c6ed-453b-a1eb-3fd9073c519c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.920477 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxfpz\" (UniqueName: \"kubernetes.io/projected/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-kube-api-access-jxfpz\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.920532 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.920547 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.920558 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:37 crc kubenswrapper[4747]: I0202 09:12:37.920570 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68e47ed3-c6ed-453b-a1eb-3fd9073c519c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.354562 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0df43a11-4442-4371-bdb3-b49610cefdc0" path="/var/lib/kubelet/pods/0df43a11-4442-4371-bdb3-b49610cefdc0/volumes" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.619551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"28634e0e-24a0-41f6-84ad-ffea557b14ed","Type":"ContainerStarted","Data":"291fa091940db41220649001726f18f4c45001d04f21e48fdefa2bc57c5c5169"} Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.622592 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.625954 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerStarted","Data":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.638766 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" event={"ID":"68e47ed3-c6ed-453b-a1eb-3fd9073c519c","Type":"ContainerDied","Data":"4b0636762d5cf6cefc1350c45f2f795f257035b7ec9f8c35939852e19d17ecc7"} Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.638821 4747 scope.go:117] "RemoveContainer" containerID="5408a980eb0055401b6491bdce5df77f5df5b3e44524804fcddbedb6ce9b75b8" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.638987 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6f57f97874-kjzqz" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.653802 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.65378499 podStartE2EDuration="3.65378499s" podCreationTimestamp="2026-02-02 09:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:38.64616073 +0000 UTC m=+971.190499193" watchObservedRunningTime="2026-02-02 09:12:38.65378499 +0000 UTC m=+971.198123413" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.675453 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.677064 4747 scope.go:117] "RemoveContainer" containerID="ee0fe79f015ef9137c12a826f435ef739ca34c5a09c1a8ada9a4f78df37dd739" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.691572 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-6f57f97874-kjzqz"] Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.921835 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 09:12:38 crc kubenswrapper[4747]: I0202 09:12:38.990316 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.049224 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.049482 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="dnsmasq-dns" containerID="cri-o://12baec122d866eb59ec6f7c4a2c06f07157e219fdaeaa777a32cf5f9afebd513" gracePeriod=10 Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.287345 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.713211 4747 generic.go:334] "Generic (PLEG): container finished" podID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerID="12baec122d866eb59ec6f7c4a2c06f07157e219fdaeaa777a32cf5f9afebd513" exitCode=0 Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.713617 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" event={"ID":"6163cdad-08d0-436e-967b-fa422f3dc4cd","Type":"ContainerDied","Data":"12baec122d866eb59ec6f7c4a2c06f07157e219fdaeaa777a32cf5f9afebd513"} Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.736991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerStarted","Data":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.806982 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.907840 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.965688 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92xjw\" (UniqueName: \"kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.966106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.966266 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.966326 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.966420 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:39 crc kubenswrapper[4747]: I0202 09:12:39.966477 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb\") pod \"6163cdad-08d0-436e-967b-fa422f3dc4cd\" (UID: \"6163cdad-08d0-436e-967b-fa422f3dc4cd\") " Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.006188 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw" (OuterVolumeSpecName: "kube-api-access-92xjw") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "kube-api-access-92xjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.068539 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92xjw\" (UniqueName: \"kubernetes.io/projected/6163cdad-08d0-436e-967b-fa422f3dc4cd-kube-api-access-92xjw\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.179667 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.189973 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config" (OuterVolumeSpecName: "config") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.200297 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.239475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.244463 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6163cdad-08d0-436e-967b-fa422f3dc4cd" (UID: "6163cdad-08d0-436e-967b-fa422f3dc4cd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.272131 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.272168 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.272177 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.272188 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.272196 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6163cdad-08d0-436e-967b-fa422f3dc4cd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.361118 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" path="/var/lib/kubelet/pods/68e47ed3-c6ed-453b-a1eb-3fd9073c519c/volumes" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.745321 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.745439 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-6f5vq" event={"ID":"6163cdad-08d0-436e-967b-fa422f3dc4cd","Type":"ContainerDied","Data":"cfdfef7ed9e3c852cb4fdc37716bb7c09f92ba9503cb9c4640dbbdefc3858163"} Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.745510 4747 scope.go:117] "RemoveContainer" containerID="12baec122d866eb59ec6f7c4a2c06f07157e219fdaeaa777a32cf5f9afebd513" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.748970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerStarted","Data":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.749670 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="cinder-scheduler" containerID="cri-o://d54509da86677af39b62966a4fd76e5b22515bf4e726a95cebba7762f692c5b8" gracePeriod=30 Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.750193 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="probe" containerID="cri-o://65328239a3adb669e1bb8b09edfcc8c219f5cb8f315b741b7000609e9d908dd0" gracePeriod=30 Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.759078 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.770436 4747 scope.go:117] "RemoveContainer" containerID="b68ee08bd5e55733b847e220a58b4025d99dd7013615fe2da81fccd6926727c8" Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.782178 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.796154 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-6f5vq"] Feb 02 09:12:40 crc kubenswrapper[4747]: I0202 09:12:40.977443 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.239372 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-75b94989dd-wzr2t"] Feb 02 09:12:41 crc kubenswrapper[4747]: E0202 09:12:41.240473 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="init" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.240498 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="init" Feb 02 09:12:41 crc kubenswrapper[4747]: E0202 09:12:41.240546 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener-log" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.240554 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener-log" Feb 02 09:12:41 crc kubenswrapper[4747]: E0202 09:12:41.240577 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.240588 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener" Feb 02 09:12:41 crc kubenswrapper[4747]: E0202 09:12:41.240608 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="dnsmasq-dns" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.240614 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="dnsmasq-dns" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.241026 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" containerName="dnsmasq-dns" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.241080 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.241115 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e47ed3-c6ed-453b-a1eb-3fd9073c519c" containerName="barbican-keystone-listener-log" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.245827 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.248892 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75b94989dd-wzr2t"] Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297536 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-scripts\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297605 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-combined-ca-bundle\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297626 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-public-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297665 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkdfk\" (UniqueName: \"kubernetes.io/projected/10126fda-de55-4027-9446-86789ba2852f-kube-api-access-nkdfk\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297697 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10126fda-de55-4027-9446-86789ba2852f-logs\") pod 
\"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297760 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-config-data\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.297803 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-internal-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.351654 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399106 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-scripts\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399172 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-combined-ca-bundle\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399190 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-public-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399251 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkdfk\" (UniqueName: \"kubernetes.io/projected/10126fda-de55-4027-9446-86789ba2852f-kube-api-access-nkdfk\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399291 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10126fda-de55-4027-9446-86789ba2852f-logs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-config-data\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.399462 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-internal-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.400303 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10126fda-de55-4027-9446-86789ba2852f-logs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.405064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-scripts\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.405172 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-public-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.407594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-config-data\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.410828 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-combined-ca-bundle\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.413527 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/10126fda-de55-4027-9446-86789ba2852f-internal-tls-certs\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.421561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkdfk\" (UniqueName: \"kubernetes.io/projected/10126fda-de55-4027-9446-86789ba2852f-kube-api-access-nkdfk\") pod \"placement-75b94989dd-wzr2t\" (UID: \"10126fda-de55-4027-9446-86789ba2852f\") " pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.578919 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cf448654b-n4qqr" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.586434 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.632748 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.640243 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.640467 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-569bd9644b-d92kb" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api-log" containerID="cri-o://7311969c15e641d1484292bb2aa2c5f540049b368724df6feb3c3555ced64982" gracePeriod=30 Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.640621 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-569bd9644b-d92kb" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api" containerID="cri-o://b4d673c2f38a726f1dbacd3bfff838dd6524d79039863542833197782321a394" gracePeriod=30 Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.792764 4747 generic.go:334] "Generic (PLEG): container finished" podID="a32243a8-81da-4d79-927e-413df2383bd7" containerID="7311969c15e641d1484292bb2aa2c5f540049b368724df6feb3c3555ced64982" exitCode=143 Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.792832 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerDied","Data":"7311969c15e641d1484292bb2aa2c5f540049b368724df6feb3c3555ced64982"} Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.794306 4747 generic.go:334] "Generic (PLEG): container finished" podID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerID="4b1c422fc70e0e5f80cae252a22d70243036db3e120b0f9d689ee56ea9c54126" exitCode=0 Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.794364 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerDied","Data":"4b1c422fc70e0e5f80cae252a22d70243036db3e120b0f9d689ee56ea9c54126"} Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.796635 4747 generic.go:334] "Generic (PLEG): container finished" podID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerID="65328239a3adb669e1bb8b09edfcc8c219f5cb8f315b741b7000609e9d908dd0" exitCode=0 Feb 02 09:12:41 crc kubenswrapper[4747]: I0202 09:12:41.796656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerDied","Data":"65328239a3adb669e1bb8b09edfcc8c219f5cb8f315b741b7000609e9d908dd0"} Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.217600 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-75b94989dd-wzr2t"] Feb 02 09:12:42 crc kubenswrapper[4747]: W0202 09:12:42.231276 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10126fda_de55_4027_9446_86789ba2852f.slice/crio-0f09c212d200449c001db37864cd728f79bf1bac574b5972a19669bcd8bc5f48 WatchSource:0}: Error finding container 0f09c212d200449c001db37864cd728f79bf1bac574b5972a19669bcd8bc5f48: Status 404 returned error can't find the container with id 0f09c212d200449c001db37864cd728f79bf1bac574b5972a19669bcd8bc5f48 Feb 02 09:12:42 crc 
kubenswrapper[4747]: I0202 09:12:42.332291 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.375159 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6163cdad-08d0-436e-967b-fa422f3dc4cd" path="/var/lib/kubelet/pods/6163cdad-08d0-436e-967b-fa422f3dc4cd/volumes" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.433728 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.433929 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.433976 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.433997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.434030 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.434188 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcfmr\" (UniqueName: \"kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.434222 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs\") pod \"03e77cb3-3fbb-4dda-8293-f4ed73283262\" (UID: \"03e77cb3-3fbb-4dda-8293-f4ed73283262\") " Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.447106 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr" (OuterVolumeSpecName: "kube-api-access-xcfmr") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "kube-api-access-xcfmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.455883 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.536095 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcfmr\" (UniqueName: \"kubernetes.io/projected/03e77cb3-3fbb-4dda-8293-f4ed73283262-kube-api-access-xcfmr\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.536124 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.587658 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config" (OuterVolumeSpecName: "config") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.595358 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.643521 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.643556 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.648162 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.680266 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.728836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "03e77cb3-3fbb-4dda-8293-f4ed73283262" (UID: "03e77cb3-3fbb-4dda-8293-f4ed73283262"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.744789 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.744820 4747 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.744831 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e77cb3-3fbb-4dda-8293-f4ed73283262-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.823376 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerStarted","Data":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.825511 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.854214 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b4dbd8489-wjfdd" event={"ID":"03e77cb3-3fbb-4dda-8293-f4ed73283262","Type":"ContainerDied","Data":"bacb4070c9ab5dcf38f94f503edf06993f33b0fd01c73bead2f6865062fb06b1"} Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.854264 4747 scope.go:117] "RemoveContainer" containerID="640cd96beda6a2402b48a48c7eba3997597527536cc9ad3d969289503765fbd3" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.854413 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b4dbd8489-wjfdd" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.866028 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.317955704 podStartE2EDuration="6.86600658s" podCreationTimestamp="2026-02-02 09:12:36 +0000 UTC" firstStartedPulling="2026-02-02 09:12:37.476868952 +0000 UTC m=+970.021207385" lastFinishedPulling="2026-02-02 09:12:42.024919828 +0000 UTC m=+974.569258261" observedRunningTime="2026-02-02 09:12:42.855091986 +0000 UTC m=+975.399430439" watchObservedRunningTime="2026-02-02 09:12:42.86600658 +0000 UTC m=+975.410345013" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.874926 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b94989dd-wzr2t" event={"ID":"10126fda-de55-4027-9446-86789ba2852f","Type":"ContainerStarted","Data":"a9df81bde6f9dd2cf04f6f6ee78c91320235346706826ec2ae8541caaecbc9d1"} Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.874982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b94989dd-wzr2t" event={"ID":"10126fda-de55-4027-9446-86789ba2852f","Type":"ContainerStarted","Data":"0f09c212d200449c001db37864cd728f79bf1bac574b5972a19669bcd8bc5f48"} Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.908720 4747 scope.go:117] "RemoveContainer" containerID="4b1c422fc70e0e5f80cae252a22d70243036db3e120b0f9d689ee56ea9c54126" Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.946729 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:42 crc kubenswrapper[4747]: I0202 09:12:42.959982 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7b4dbd8489-wjfdd"] Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.547485 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7495bf65bd-857k2" Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.612836 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.613083 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon-log" containerID="cri-o://a0c42adcbe1996d24d8aebea7daa350ea0b388498a4ddae2b820374337d92f36" gracePeriod=30 Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.613229 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" containerID="cri-o://b58beb9bccf8377ae3190cb61f1add3bdb034c8ba31eebd73a6e7366b3cb3328" gracePeriod=30 Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.624115 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.886392 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-75b94989dd-wzr2t" event={"ID":"10126fda-de55-4027-9446-86789ba2852f","Type":"ContainerStarted","Data":"64a324d0dd49d4a72d4dff45f417f0ff5de359e65ffddb2aed9255015727c61b"} Feb 02 09:12:43 crc kubenswrapper[4747]: I0202 09:12:43.906383 
4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-75b94989dd-wzr2t" podStartSLOduration=2.9063605 podStartE2EDuration="2.9063605s" podCreationTimestamp="2026-02-02 09:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:43.902065812 +0000 UTC m=+976.446404245" watchObservedRunningTime="2026-02-02 09:12:43.9063605 +0000 UTC m=+976.450698933" Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.350548 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" path="/var/lib/kubelet/pods/03e77cb3-3fbb-4dda-8293-f4ed73283262/volumes" Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.822619 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-569bd9644b-d92kb" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:45172->10.217.0.160:9311: read: connection reset by peer" Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.822619 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-569bd9644b-d92kb" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:45180->10.217.0.160:9311: read: connection reset by peer" Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.900555 4747 generic.go:334] "Generic (PLEG): container finished" podID="a32243a8-81da-4d79-927e-413df2383bd7" containerID="b4d673c2f38a726f1dbacd3bfff838dd6524d79039863542833197782321a394" exitCode=0 Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.900661 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerDied","Data":"b4d673c2f38a726f1dbacd3bfff838dd6524d79039863542833197782321a394"} Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.903085 4747 generic.go:334] "Generic (PLEG): container finished" podID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerID="d54509da86677af39b62966a4fd76e5b22515bf4e726a95cebba7762f692c5b8" exitCode=0 Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.903145 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerDied","Data":"d54509da86677af39b62966a4fd76e5b22515bf4e726a95cebba7762f692c5b8"} Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.904146 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:44 crc kubenswrapper[4747]: I0202 09:12:44.904168 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.107165 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192533 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192679 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192718 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192855 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192880 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.192975 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4blbx\" (UniqueName: \"kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx\") pod \"29ee5c95-68dc-4d84-8422-e39996aa1c51\" (UID: \"29ee5c95-68dc-4d84-8422-e39996aa1c51\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.196012 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.221969 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx" (OuterVolumeSpecName: "kube-api-access-4blbx") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "kube-api-access-4blbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.243114 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts" (OuterVolumeSpecName: "scripts") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.269134 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.301135 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.301173 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4blbx\" (UniqueName: \"kubernetes.io/projected/29ee5c95-68dc-4d84-8422-e39996aa1c51-kube-api-access-4blbx\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.301188 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.301201 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/29ee5c95-68dc-4d84-8422-e39996aa1c51-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.377550 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.396670 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.403509 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.436121 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data" (OuterVolumeSpecName: "config-data") pod "29ee5c95-68dc-4d84-8422-e39996aa1c51" (UID: "29ee5c95-68dc-4d84-8422-e39996aa1c51"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.504761 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505109 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505273 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505547 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505715 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b47hj\" (UniqueName: \"kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.505831 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs\") pod \"a32243a8-81da-4d79-927e-413df2383bd7\" (UID: \"a32243a8-81da-4d79-927e-413df2383bd7\") " Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.506282 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ee5c95-68dc-4d84-8422-e39996aa1c51-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.506772 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs" (OuterVolumeSpecName: "logs") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.508965 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.509067 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj" (OuterVolumeSpecName: "kube-api-access-b47hj") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "kube-api-access-b47hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.538658 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.558948 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.560696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data" (OuterVolumeSpecName: "config-data") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.563532 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a32243a8-81da-4d79-927e-413df2383bd7" (UID: "a32243a8-81da-4d79-927e-413df2383bd7"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610158 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610195 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610204 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610215 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b47hj\" (UniqueName: \"kubernetes.io/projected/a32243a8-81da-4d79-927e-413df2383bd7-kube-api-access-b47hj\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610224 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a32243a8-81da-4d79-927e-413df2383bd7-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610234 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.610242 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32243a8-81da-4d79-927e-413df2383bd7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.916207 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"29ee5c95-68dc-4d84-8422-e39996aa1c51","Type":"ContainerDied","Data":"44d2d88f2481d261fe693b22669665dd90b30b4eed57f36a00df301cd1f962de"} Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.916255 4747 scope.go:117] "RemoveContainer" containerID="65328239a3adb669e1bb8b09edfcc8c219f5cb8f315b741b7000609e9d908dd0" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.916361 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.919993 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-569bd9644b-d92kb" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.923420 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-569bd9644b-d92kb" event={"ID":"a32243a8-81da-4d79-927e-413df2383bd7","Type":"ContainerDied","Data":"832b0255e2095e3ff06b90271b1359ac05eab348c5358a9fa6ce5884705a4c91"} Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.951894 4747 scope.go:117] "RemoveContainer" containerID="d54509da86677af39b62966a4fd76e5b22515bf4e726a95cebba7762f692c5b8" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.983602 4747 scope.go:117] "RemoveContainer" containerID="b4d673c2f38a726f1dbacd3bfff838dd6524d79039863542833197782321a394" Feb 02 09:12:45 crc kubenswrapper[4747]: I0202 09:12:45.991313 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.011267 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021084 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021509 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021529 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021543 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="cinder-scheduler" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021551 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="cinder-scheduler" Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021565 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-api" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021571 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-api" Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021587 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021592 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api" Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021603 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api-log" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021611 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api-log" Feb 02 09:12:46 crc kubenswrapper[4747]: E0202 09:12:46.021631 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="probe" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021640 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="probe" Feb 
02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021800 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api-log" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021822 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-httpd" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021834 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="03e77cb3-3fbb-4dda-8293-f4ed73283262" containerName="neutron-api" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021861 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="cinder-scheduler" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021874 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32243a8-81da-4d79-927e-413df2383bd7" containerName="barbican-api" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.021886 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" containerName="probe" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.022817 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.025438 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.031663 4747 scope.go:117] "RemoveContainer" containerID="7311969c15e641d1484292bb2aa2c5f540049b368724df6feb3c3555ced64982" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.034539 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.049192 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.055352 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-569bd9644b-d92kb"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.120424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.120544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.120655 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.120763 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.120848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4h9z\" (UniqueName: \"kubernetes.io/projected/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-kube-api-access-l4h9z\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.121079 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.223881 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224151 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224213 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4h9z\" (UniqueName: \"kubernetes.io/projected/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-kube-api-access-l4h9z\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224295 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.224334 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" 
Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.231035 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.232266 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-scripts\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.245042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.246775 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.248764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4h9z\" (UniqueName: \"kubernetes.io/projected/bfd38cbd-124d-4f9a-9b9b-d724b277fbcb-kube-api-access-l4h9z\") pod \"cinder-scheduler-0\" (UID: \"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb\") " pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.352821 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ee5c95-68dc-4d84-8422-e39996aa1c51" path="/var/lib/kubelet/pods/29ee5c95-68dc-4d84-8422-e39996aa1c51/volumes" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.353848 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a32243a8-81da-4d79-927e-413df2383bd7" path="/var/lib/kubelet/pods/a32243a8-81da-4d79-927e-413df2383bd7/volumes" Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.356972 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 09:12:46 crc kubenswrapper[4747]: W0202 09:12:46.812987 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfd38cbd_124d_4f9a_9b9b_d724b277fbcb.slice/crio-e447255eacdc29e888ba13e4c0ede3c0c4c386241fffb03e73f651d2c0d6258b WatchSource:0}: Error finding container e447255eacdc29e888ba13e4c0ede3c0c4c386241fffb03e73f651d2c0d6258b: Status 404 returned error can't find the container with id e447255eacdc29e888ba13e4c0ede3c0c4c386241fffb03e73f651d2c0d6258b Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.815260 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 09:12:46 crc kubenswrapper[4747]: I0202 09:12:46.934430 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb","Type":"ContainerStarted","Data":"e447255eacdc29e888ba13e4c0ede3c0c4c386241fffb03e73f651d2c0d6258b"} Feb 02 09:12:47 crc kubenswrapper[4747]: I0202 09:12:47.015477 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:40510->10.217.0.145:8443: read: connection reset by peer" Feb 02 09:12:47 crc kubenswrapper[4747]: I0202 09:12:47.236169 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m5674" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:47 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:47 crc kubenswrapper[4747]: > Feb 02 09:12:47 crc kubenswrapper[4747]: I0202 09:12:47.954840 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb","Type":"ContainerStarted","Data":"d31a1dd3590c4b35dab05b34bdae3e518a14df0b8044ee75abbf758ce69295e5"} Feb 02 09:12:47 crc kubenswrapper[4747]: I0202 09:12:47.958802 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerID="b58beb9bccf8377ae3190cb61f1add3bdb034c8ba31eebd73a6e7366b3cb3328" exitCode=0 Feb 02 09:12:47 crc kubenswrapper[4747]: I0202 09:12:47.958845 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerDied","Data":"b58beb9bccf8377ae3190cb61f1add3bdb034c8ba31eebd73a6e7366b3cb3328"} Feb 02 09:12:48 crc kubenswrapper[4747]: I0202 09:12:48.023116 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 02 09:12:48 crc kubenswrapper[4747]: I0202 09:12:48.526977 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Feb 02 09:12:48 crc kubenswrapper[4747]: I0202 09:12:48.970782 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"bfd38cbd-124d-4f9a-9b9b-d724b277fbcb","Type":"ContainerStarted","Data":"81bd0256c932753d65d3d3fa9f457bde6fe3a7d740f3e04969e7f88323415076"} Feb 02 09:12:48 crc kubenswrapper[4747]: I0202 09:12:48.995830 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.995790243 podStartE2EDuration="3.995790243s" podCreationTimestamp="2026-02-02 09:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:48.987561857 +0000 UTC m=+981.531900290" watchObservedRunningTime="2026-02-02 09:12:48.995790243 +0000 UTC m=+981.540128676" Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.518473 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.518867 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.518909 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.519965 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.520016 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683" gracePeriod=600 Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.605751 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-559c565ccd-md2lx" Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.991245 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683" exitCode=0 Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.991408 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683"} Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.991557 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" 
event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627"} Feb 02 09:12:50 crc kubenswrapper[4747]: I0202 09:12:50.991577 4747 scope.go:117] "RemoveContainer" containerID="b0adff2464258c94f1128c08c9ab4c452fc551d8ccc27b1f6ec0c8bc78ed9f7b" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.157729 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.159767 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.161572 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.162693 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-bmmpf" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.165652 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.166330 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.224636 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rwjg\" (UniqueName: \"kubernetes.io/projected/54560832-e0ee-4493-a567-b4a3e7ca4e8f-kube-api-access-9rwjg\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.224774 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.224825 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.224862 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.326101 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.326169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rwjg\" (UniqueName: \"kubernetes.io/projected/54560832-e0ee-4493-a567-b4a3e7ca4e8f-kube-api-access-9rwjg\") pod \"openstackclient\" 
(UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.326296 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.326356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.327142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.332863 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.335389 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/54560832-e0ee-4493-a567-b4a3e7ca4e8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.346480 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rwjg\" (UniqueName: \"kubernetes.io/projected/54560832-e0ee-4493-a567-b4a3e7ca4e8f-kube-api-access-9rwjg\") pod \"openstackclient\" (UID: \"54560832-e0ee-4493-a567-b4a3e7ca4e8f\") " pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.357720 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.476129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 09:12:51 crc kubenswrapper[4747]: I0202 09:12:51.943194 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 09:12:52 crc kubenswrapper[4747]: I0202 09:12:52.003140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"54560832-e0ee-4493-a567-b4a3e7ca4e8f","Type":"ContainerStarted","Data":"2bbb235ffe52d08f6972b59a2fb0f532a519528dbfa22c1ad1eab5e0370009ae"} Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.923542 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-67d8547547-ntqwl"] Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.928432 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.931600 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.932209 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.932452 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 02 09:12:54 crc kubenswrapper[4747]: I0202 09:12:54.941524 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-67d8547547-ntqwl"] Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.010580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-public-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.010625 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-combined-ca-bundle\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.010653 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-log-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.010783 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-internal-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.010855 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-etc-swift\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.011006 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-config-data\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.011039 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-run-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " 
pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.011265 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9ng5\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-kube-api-access-n9ng5\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113046 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-public-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113388 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-combined-ca-bundle\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113415 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-log-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113478 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-internal-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113508 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-etc-swift\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-config-data\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113583 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-run-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.113684 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9ng5\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-kube-api-access-n9ng5\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " 
pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.114857 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-log-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.115181 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c6682738-3d59-4dee-aca4-df90ecf8179e-run-httpd\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.122610 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-config-data\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.122903 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-etc-swift\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.123737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-internal-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.123991 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-public-tls-certs\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.125282 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6682738-3d59-4dee-aca4-df90ecf8179e-combined-ca-bundle\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.141701 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9ng5\" (UniqueName: \"kubernetes.io/projected/c6682738-3d59-4dee-aca4-df90ecf8179e-kube-api-access-n9ng5\") pod \"swift-proxy-67d8547547-ntqwl\" (UID: \"c6682738-3d59-4dee-aca4-df90ecf8179e\") " pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.249916 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.844024 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-67d8547547-ntqwl"] Feb 02 09:12:55 crc kubenswrapper[4747]: W0202 09:12:55.850239 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6682738_3d59_4dee_aca4_df90ecf8179e.slice/crio-c2c24459c873b29dbc78a16647e7c5b3ab96ea93320fae9ef31b7019af644639 WatchSource:0}: Error finding container c2c24459c873b29dbc78a16647e7c5b3ab96ea93320fae9ef31b7019af644639: Status 404 returned error can't find the container with id c2c24459c873b29dbc78a16647e7c5b3ab96ea93320fae9ef31b7019af644639 Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.966572 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.967924 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-central-agent" containerID="cri-o://64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" gracePeriod=30 Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.968210 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-notification-agent" containerID="cri-o://47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" gracePeriod=30 Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.968297 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="proxy-httpd" containerID="cri-o://521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" gracePeriod=30 Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.968313 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="sg-core" containerID="cri-o://e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" gracePeriod=30 Feb 02 09:12:55 crc kubenswrapper[4747]: I0202 09:12:55.986216 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.056152 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-67d8547547-ntqwl" event={"ID":"c6682738-3d59-4dee-aca4-df90ecf8179e","Type":"ContainerStarted","Data":"c2c24459c873b29dbc78a16647e7c5b3ab96ea93320fae9ef31b7019af644639"} Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.651998 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wm4f5"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.656203 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.668208 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wm4f5"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.745973 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts\") pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.746109 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szd6c\" (UniqueName: \"kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c\") pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.757012 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-q4q4t"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.758432 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.769872 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q4q4t"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.794004 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-be8f-account-create-update-62ffh"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.795415 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.808023 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.809488 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-be8f-account-create-update-62ffh"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.850482 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szd6c\" (UniqueName: \"kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c\") pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.850771 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99gsm\" (UniqueName: \"kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm\") pod \"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.850851 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts\") pod \"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.850968 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.851153 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts\") pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.851232 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47lll\" (UniqueName: \"kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.852286 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts\") pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.879668 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szd6c\" (UniqueName: \"kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c\") 
pod \"nova-api-db-create-wm4f5\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.891762 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-lpmrk"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.893168 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.901529 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.915885 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lpmrk"] Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.952356 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc2lz\" (UniqueName: \"kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.952678 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99gsm\" (UniqueName: \"kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm\") pod \"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.952788 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts\") pod \"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.952875 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.952974 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47lll\" (UniqueName: \"kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.953058 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.953714 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts\") pod 
\"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.961709 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.974065 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47lll\" (UniqueName: \"kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll\") pod \"nova-cell0-db-create-q4q4t\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.978332 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99gsm\" (UniqueName: \"kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm\") pod \"nova-api-be8f-account-create-update-62ffh\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:56 crc kubenswrapper[4747]: I0202 09:12:56.978692 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.005888 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-9429-account-create-update-26wsc"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.010326 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.014159 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.027057 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9429-account-create-update-26wsc"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.040672 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.054734 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8twm5\" (UniqueName: \"kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.054794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc2lz\" (UniqueName: \"kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.054888 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.054960 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.055648 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077107 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-dd8a-account-create-update-5dpcd"] Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.077479 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="proxy-httpd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077494 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="proxy-httpd" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.077511 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-notification-agent" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077519 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-notification-agent" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.077538 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-central-agent" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077546 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-central-agent" Feb 02 09:12:57 crc 
kubenswrapper[4747]: E0202 09:12:57.077568 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="sg-core" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077573 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="sg-core" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077717 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-central-agent" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077730 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="sg-core" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077748 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="proxy-httpd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.077757 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54829773-3e03-44a1-adab-99734d5129ec" containerName="ceilometer-notification-agent" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.078362 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.078752 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc2lz\" (UniqueName: \"kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz\") pod \"nova-cell1-db-create-lpmrk\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.080483 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.084791 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-dd8a-account-create-update-5dpcd"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090611 4747 generic.go:334] "Generic (PLEG): container finished" podID="54829773-3e03-44a1-adab-99734d5129ec" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" exitCode=0 Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090659 4747 generic.go:334] "Generic (PLEG): container finished" podID="54829773-3e03-44a1-adab-99734d5129ec" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" exitCode=2 Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090672 4747 generic.go:334] "Generic (PLEG): container finished" podID="54829773-3e03-44a1-adab-99734d5129ec" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" exitCode=0 Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090686 4747 generic.go:334] "Generic (PLEG): container finished" podID="54829773-3e03-44a1-adab-99734d5129ec" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" exitCode=0 Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerDied","Data":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090784 4747 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerDied","Data":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090801 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerDied","Data":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090815 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerDied","Data":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090827 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54829773-3e03-44a1-adab-99734d5129ec","Type":"ContainerDied","Data":"c25877cd6a58093d2ffbe6070c41b602ef27978251cc2f6d48cceaaed8b60a07"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.090846 4747 scope.go:117] "RemoveContainer" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.091057 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.093394 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.105206 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-67d8547547-ntqwl" event={"ID":"c6682738-3d59-4dee-aca4-df90ecf8179e","Type":"ContainerStarted","Data":"34198bad6b225ceb9f68bcc7f51c2b19ec750e40132919880ca2b786efc48f0a"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.105246 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-67d8547547-ntqwl" event={"ID":"c6682738-3d59-4dee-aca4-df90ecf8179e","Type":"ContainerStarted","Data":"0361d739efe38b38cba4ade4f1746348357ddc2043edf8080dd7b812cf0817cc"} Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.105827 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.105886 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.143782 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-67d8547547-ntqwl" podStartSLOduration=3.143757463 podStartE2EDuration="3.143757463s" podCreationTimestamp="2026-02-02 09:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:57.134686135 +0000 UTC m=+989.679024568" watchObservedRunningTime="2026-02-02 09:12:57.143757463 +0000 UTC m=+989.688095906" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.147493 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156554 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156609 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156689 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156732 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156856 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tngt6\" (UniqueName: \"kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.156945 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd\") pod \"54829773-3e03-44a1-adab-99734d5129ec\" (UID: \"54829773-3e03-44a1-adab-99734d5129ec\") " Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.157193 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdh2d\" (UniqueName: \"kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.157217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.157297 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-8twm5\" (UniqueName: \"kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.157334 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.163164 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.163445 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.163750 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.165501 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6" (OuterVolumeSpecName: "kube-api-access-tngt6") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "kube-api-access-tngt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.167624 4747 scope.go:117] "RemoveContainer" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.167836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts" (OuterVolumeSpecName: "scripts") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.189085 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8twm5\" (UniqueName: \"kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5\") pod \"nova-cell0-9429-account-create-update-26wsc\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.227893 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.239719 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.240065 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m5674" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" probeResult="failure" output=< Feb 02 09:12:57 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:12:57 crc kubenswrapper[4747]: > Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258756 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdh2d\" (UniqueName: \"kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258893 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258904 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258914 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tngt6\" (UniqueName: \"kubernetes.io/projected/54829773-3e03-44a1-adab-99734d5129ec-kube-api-access-tngt6\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258923 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54829773-3e03-44a1-adab-99734d5129ec-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.258931 4747 reconciler_common.go:293] "Volume detached for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.259710 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.275215 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdh2d\" (UniqueName: \"kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d\") pod \"nova-cell1-dd8a-account-create-update-5dpcd\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.322397 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.339662 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.361806 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.385077 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data" (OuterVolumeSpecName: "config-data") pod "54829773-3e03-44a1-adab-99734d5129ec" (UID: "54829773-3e03-44a1-adab-99734d5129ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.405887 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.448165 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.464065 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54829773-3e03-44a1-adab-99734d5129ec-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.480823 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.492986 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.495063 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.500340 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.500598 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.521531 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.552297 4747 scope.go:117] "RemoveContainer" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.566789 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.566844 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.566991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.567037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.567055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fkv9\" (UniqueName: \"kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.567079 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.567095 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.577797 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wm4f5"] Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.593031 4747 
scope.go:117] "RemoveContainer" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669075 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669442 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669464 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fkv9\" (UniqueName: \"kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669515 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.669574 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.670111 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.670627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.675542 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts\") pod \"ceilometer-0\" (UID: 
\"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.675722 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.679466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.679551 4747 scope.go:117] "RemoveContainer" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.679882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.681129 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": container with ID starting with 521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b not found: ID does not exist" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.681164 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} err="failed to get container status \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": rpc error: code = NotFound desc = could not find container \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": container with ID starting with 521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.681188 4747 scope.go:117] "RemoveContainer" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.687019 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": container with ID starting with e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775 not found: ID does not exist" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.687063 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} err="failed to get container status \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": rpc error: code = NotFound desc = could not find container \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": container with ID starting with 
e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.687089 4747 scope.go:117] "RemoveContainer" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.691561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fkv9\" (UniqueName: \"kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9\") pod \"ceilometer-0\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.691657 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": container with ID starting with 47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1 not found: ID does not exist" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.691700 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} err="failed to get container status \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": rpc error: code = NotFound desc = could not find container \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": container with ID starting with 47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.691725 4747 scope.go:117] "RemoveContainer" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: E0202 09:12:57.695991 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": container with ID starting with 64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6 not found: ID does not exist" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.696014 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} err="failed to get container status \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": rpc error: code = NotFound desc = could not find container \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": container with ID starting with 64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.696027 4747 scope.go:117] "RemoveContainer" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.699567 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} err="failed to get container status \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": rpc error: code = NotFound desc = could not find container 
\"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": container with ID starting with 521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.699583 4747 scope.go:117] "RemoveContainer" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700022 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} err="failed to get container status \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": rpc error: code = NotFound desc = could not find container \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": container with ID starting with e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700040 4747 scope.go:117] "RemoveContainer" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700296 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} err="failed to get container status \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": rpc error: code = NotFound desc = could not find container \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": container with ID starting with 47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700310 4747 scope.go:117] "RemoveContainer" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700542 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} err="failed to get container status \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": rpc error: code = NotFound desc = could not find container \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": container with ID starting with 64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700555 4747 scope.go:117] "RemoveContainer" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700795 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} err="failed to get container status \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": rpc error: code = NotFound desc = could not find container \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": container with ID starting with 521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.700809 4747 scope.go:117] "RemoveContainer" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.701019 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} err="failed to get container status \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": rpc error: code = NotFound desc = could not find container \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": container with ID starting with e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.701034 4747 scope.go:117] "RemoveContainer" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.702521 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} err="failed to get container status \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": rpc error: code = NotFound desc = could not find container \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": container with ID starting with 47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.702551 4747 scope.go:117] "RemoveContainer" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.709992 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} err="failed to get container status \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": rpc error: code = NotFound desc = could not find container \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": container with ID starting with 64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.710014 4747 scope.go:117] "RemoveContainer" containerID="521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.711394 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b"} err="failed to get container status \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": rpc error: code = NotFound desc = could not find container \"521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b\": container with ID starting with 521b2b89218149e1d9f92f3fc84756457d3ba22ff6b02f3e028cf7692604809b not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.711413 4747 scope.go:117] "RemoveContainer" containerID="e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.713481 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775"} err="failed to get container status \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": rpc error: code = NotFound desc = could not find container \"e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775\": container with ID starting with 
e69aa85603938fe8652dd5af73b7789413dfa7b3df9128c7f7c861ebd615d775 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.713503 4747 scope.go:117] "RemoveContainer" containerID="47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.713762 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1"} err="failed to get container status \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": rpc error: code = NotFound desc = could not find container \"47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1\": container with ID starting with 47e580a9e725133c9e6bf5d250cd17497bfb2e73efa0d24d94a407726f627ae1 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.713776 4747 scope.go:117] "RemoveContainer" containerID="64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.717428 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6"} err="failed to get container status \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": rpc error: code = NotFound desc = could not find container \"64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6\": container with ID starting with 64e3a950321255d96ef2ee83a1fa7888871536ad50dd1c8034193049fc7892b6 not found: ID does not exist" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.806253 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q4q4t"] Feb 02 09:12:57 crc kubenswrapper[4747]: W0202 09:12:57.817341 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77fb1b7e_19e7_4d04_afc7_b55138b71d95.slice/crio-6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f WatchSource:0}: Error finding container 6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f: Status 404 returned error can't find the container with id 6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.831397 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:12:57 crc kubenswrapper[4747]: I0202 09:12:57.901380 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lpmrk"] Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.054263 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-be8f-account-create-update-62ffh"] Feb 02 09:12:58 crc kubenswrapper[4747]: W0202 09:12:58.091771 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9205288_ed77_41ad_8feb_a3ddbb0646ac.slice/crio-47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f WatchSource:0}: Error finding container 47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f: Status 404 returned error can't find the container with id 47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.101762 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9429-account-create-update-26wsc"] Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.188025 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9429-account-create-update-26wsc" event={"ID":"a9205288-ed77-41ad-8feb-a3ddbb0646ac","Type":"ContainerStarted","Data":"47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.265469 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q4q4t" event={"ID":"77fb1b7e-19e7-4d04-afc7-b55138b71d95","Type":"ContainerStarted","Data":"6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.272206 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-be8f-account-create-update-62ffh" event={"ID":"117bb1fe-06bc-4df1-82b2-901af8bb8287","Type":"ContainerStarted","Data":"8049aad79b47e0ade1d77727c28dad5362c814fbb8c34eabe1bcc2a9ea84370d"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.284419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lpmrk" event={"ID":"0f897693-3330-45c3-8c0b-d0fff9970b4b","Type":"ContainerStarted","Data":"1e16e8326b3e892f511bb02c7ae0a4e26b77b35067cb12eec320fc4ce9128985"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.285164 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-dd8a-account-create-update-5dpcd"] Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.291040 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wm4f5" event={"ID":"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa","Type":"ContainerStarted","Data":"0476a2c507e535ce5fafcb4d21bb793557dd7ef2ffd877c1bf92b3519d343ead"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.291091 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wm4f5" event={"ID":"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa","Type":"ContainerStarted","Data":"01ca2e6bf3808f997f2dd8b3c2f2f86dd052ed4625ec69cfe54157c8b3b1c977"} Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.318305 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-lpmrk" podStartSLOduration=2.318280314 podStartE2EDuration="2.318280314s" podCreationTimestamp="2026-02-02 09:12:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:58.30458886 +0000 UTC m=+990.848927293" watchObservedRunningTime="2026-02-02 09:12:58.318280314 +0000 UTC m=+990.862618747" Feb 02 09:12:58 crc kubenswrapper[4747]: W0202 09:12:58.324759 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod233dd940_ae1f_48d9_acee_ba069d7a93fb.slice/crio-f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d WatchSource:0}: Error finding container f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d: Status 404 returned error can't find the container with id f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.324959 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-wm4f5" podStartSLOduration=2.324924061 podStartE2EDuration="2.324924061s" podCreationTimestamp="2026-02-02 09:12:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:12:58.324880129 +0000 UTC m=+990.869218562" watchObservedRunningTime="2026-02-02 09:12:58.324924061 +0000 UTC m=+990.869262494" Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.370578 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54829773-3e03-44a1-adab-99734d5129ec" path="/var/lib/kubelet/pods/54829773-3e03-44a1-adab-99734d5129ec/volumes" Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.527148 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Feb 02 09:12:58 crc kubenswrapper[4747]: I0202 09:12:58.590021 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.309497 4747 generic.go:334] "Generic (PLEG): container finished" podID="77fb1b7e-19e7-4d04-afc7-b55138b71d95" containerID="6b01ac03b256a3515e17f65bb4cff1287ab3eddb5bc5d11649adef74d9503171" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.309621 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q4q4t" event={"ID":"77fb1b7e-19e7-4d04-afc7-b55138b71d95","Type":"ContainerDied","Data":"6b01ac03b256a3515e17f65bb4cff1287ab3eddb5bc5d11649adef74d9503171"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.316470 4747 generic.go:334] "Generic (PLEG): container finished" podID="117bb1fe-06bc-4df1-82b2-901af8bb8287" containerID="91ac5a4e6796a03cba0420ab6f710cb5e6874f93e48f8aa67d401e422e4272c4" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.316575 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-be8f-account-create-update-62ffh" event={"ID":"117bb1fe-06bc-4df1-82b2-901af8bb8287","Type":"ContainerDied","Data":"91ac5a4e6796a03cba0420ab6f710cb5e6874f93e48f8aa67d401e422e4272c4"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.332140 4747 generic.go:334] "Generic (PLEG): container finished" podID="0f897693-3330-45c3-8c0b-d0fff9970b4b" containerID="f2375400f72549bab36ff9dba42c1e3614064d8ee53e69aa1445bdca98966476" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: 
I0202 09:12:59.332346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lpmrk" event={"ID":"0f897693-3330-45c3-8c0b-d0fff9970b4b","Type":"ContainerDied","Data":"f2375400f72549bab36ff9dba42c1e3614064d8ee53e69aa1445bdca98966476"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.336894 4747 generic.go:334] "Generic (PLEG): container finished" podID="ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" containerID="0476a2c507e535ce5fafcb4d21bb793557dd7ef2ffd877c1bf92b3519d343ead" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.336964 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wm4f5" event={"ID":"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa","Type":"ContainerDied","Data":"0476a2c507e535ce5fafcb4d21bb793557dd7ef2ffd877c1bf92b3519d343ead"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.339093 4747 generic.go:334] "Generic (PLEG): container finished" podID="233dd940-ae1f-48d9-acee-ba069d7a93fb" containerID="14c87cc8611852653ad444035133d8c121e223ed2527e2609e76c6506b6b30e0" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.339138 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" event={"ID":"233dd940-ae1f-48d9-acee-ba069d7a93fb","Type":"ContainerDied","Data":"14c87cc8611852653ad444035133d8c121e223ed2527e2609e76c6506b6b30e0"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.339154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" event={"ID":"233dd940-ae1f-48d9-acee-ba069d7a93fb","Type":"ContainerStarted","Data":"f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d"} Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.351942 4747 generic.go:334] "Generic (PLEG): container finished" podID="a9205288-ed77-41ad-8feb-a3ddbb0646ac" containerID="7d13ad68862e10421a053d1361ac149bd392fbc361043b481fc12bf271dd4737" exitCode=0 Feb 02 09:12:59 crc kubenswrapper[4747]: I0202 09:12:59.351978 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9429-account-create-update-26wsc" event={"ID":"a9205288-ed77-41ad-8feb-a3ddbb0646ac","Type":"ContainerDied","Data":"7d13ad68862e10421a053d1361ac149bd392fbc361043b481fc12bf271dd4737"} Feb 02 09:13:02 crc kubenswrapper[4747]: I0202 09:13:02.840957 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7fb8f585c-hlmn8" Feb 02 09:13:02 crc kubenswrapper[4747]: I0202 09:13:02.902645 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:13:02 crc kubenswrapper[4747]: I0202 09:13:02.902860 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55c4f4d6c6-jtktd" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-api" containerID="cri-o://540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623" gracePeriod=30 Feb 02 09:13:02 crc kubenswrapper[4747]: I0202 09:13:02.903256 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55c4f4d6c6-jtktd" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-httpd" containerID="cri-o://3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3" gracePeriod=30 Feb 02 09:13:03 crc kubenswrapper[4747]: I0202 09:13:03.393055 4747 generic.go:334] "Generic (PLEG): container finished" podID="5760787c-4f47-4115-bd6d-12f036d73793" 
containerID="3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3" exitCode=0 Feb 02 09:13:03 crc kubenswrapper[4747]: I0202 09:13:03.393351 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerDied","Data":"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3"} Feb 02 09:13:04 crc kubenswrapper[4747]: W0202 09:13:04.441340 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62df6fd9_5d5e_4d1b_98ac_023e270505cb.slice/crio-50a21be27c33e0a7a084f0a9673fe9fc88a750288805fd03264dfd81e5078760 WatchSource:0}: Error finding container 50a21be27c33e0a7a084f0a9673fe9fc88a750288805fd03264dfd81e5078760: Status 404 returned error can't find the container with id 50a21be27c33e0a7a084f0a9673fe9fc88a750288805fd03264dfd81e5078760 Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.564320 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.573952 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.598457 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.617593 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szd6c\" (UniqueName: \"kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c\") pod \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.617702 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts\") pod \"0f897693-3330-45c3-8c0b-d0fff9970b4b\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.617738 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc2lz\" (UniqueName: \"kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz\") pod \"0f897693-3330-45c3-8c0b-d0fff9970b4b\" (UID: \"0f897693-3330-45c3-8c0b-d0fff9970b4b\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.617821 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts\") pod \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\" (UID: \"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.618757 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" (UID: "ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.619126 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f897693-3330-45c3-8c0b-d0fff9970b4b" (UID: "0f897693-3330-45c3-8c0b-d0fff9970b4b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.622956 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c" (OuterVolumeSpecName: "kube-api-access-szd6c") pod "ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" (UID: "ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa"). InnerVolumeSpecName "kube-api-access-szd6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.623117 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz" (OuterVolumeSpecName: "kube-api-access-gc2lz") pod "0f897693-3330-45c3-8c0b-d0fff9970b4b" (UID: "0f897693-3330-45c3-8c0b-d0fff9970b4b"). InnerVolumeSpecName "kube-api-access-gc2lz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.719506 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szd6c\" (UniqueName: \"kubernetes.io/projected/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-kube-api-access-szd6c\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.719545 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f897693-3330-45c3-8c0b-d0fff9970b4b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.719558 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc2lz\" (UniqueName: \"kubernetes.io/projected/0f897693-3330-45c3-8c0b-d0fff9970b4b-kube-api-access-gc2lz\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.719571 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.752188 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.759060 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.799005 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.816844 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.821109 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdh2d\" (UniqueName: \"kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d\") pod \"233dd940-ae1f-48d9-acee-ba069d7a93fb\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.821268 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts\") pod \"233dd940-ae1f-48d9-acee-ba069d7a93fb\" (UID: \"233dd940-ae1f-48d9-acee-ba069d7a93fb\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.821402 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8twm5\" (UniqueName: \"kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5\") pod \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.821431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts\") pod \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\" (UID: \"a9205288-ed77-41ad-8feb-a3ddbb0646ac\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.822653 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9205288-ed77-41ad-8feb-a3ddbb0646ac" (UID: "a9205288-ed77-41ad-8feb-a3ddbb0646ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.823559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "233dd940-ae1f-48d9-acee-ba069d7a93fb" (UID: "233dd940-ae1f-48d9-acee-ba069d7a93fb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.830474 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d" (OuterVolumeSpecName: "kube-api-access-rdh2d") pod "233dd940-ae1f-48d9-acee-ba069d7a93fb" (UID: "233dd940-ae1f-48d9-acee-ba069d7a93fb"). InnerVolumeSpecName "kube-api-access-rdh2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.840891 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5" (OuterVolumeSpecName: "kube-api-access-8twm5") pod "a9205288-ed77-41ad-8feb-a3ddbb0646ac" (UID: "a9205288-ed77-41ad-8feb-a3ddbb0646ac"). InnerVolumeSpecName "kube-api-access-8twm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923040 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47lll\" (UniqueName: \"kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll\") pod \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923241 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99gsm\" (UniqueName: \"kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm\") pod \"117bb1fe-06bc-4df1-82b2-901af8bb8287\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923377 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts\") pod \"117bb1fe-06bc-4df1-82b2-901af8bb8287\" (UID: \"117bb1fe-06bc-4df1-82b2-901af8bb8287\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923468 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts\") pod \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\" (UID: \"77fb1b7e-19e7-4d04-afc7-b55138b71d95\") " Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923830 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233dd940-ae1f-48d9-acee-ba069d7a93fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923850 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8twm5\" (UniqueName: \"kubernetes.io/projected/a9205288-ed77-41ad-8feb-a3ddbb0646ac-kube-api-access-8twm5\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923862 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9205288-ed77-41ad-8feb-a3ddbb0646ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923872 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdh2d\" (UniqueName: \"kubernetes.io/projected/233dd940-ae1f-48d9-acee-ba069d7a93fb-kube-api-access-rdh2d\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.923878 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "117bb1fe-06bc-4df1-82b2-901af8bb8287" (UID: "117bb1fe-06bc-4df1-82b2-901af8bb8287"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.924113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77fb1b7e-19e7-4d04-afc7-b55138b71d95" (UID: "77fb1b7e-19e7-4d04-afc7-b55138b71d95"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.926901 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm" (OuterVolumeSpecName: "kube-api-access-99gsm") pod "117bb1fe-06bc-4df1-82b2-901af8bb8287" (UID: "117bb1fe-06bc-4df1-82b2-901af8bb8287"). InnerVolumeSpecName "kube-api-access-99gsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:04 crc kubenswrapper[4747]: I0202 09:13:04.927158 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll" (OuterVolumeSpecName: "kube-api-access-47lll") pod "77fb1b7e-19e7-4d04-afc7-b55138b71d95" (UID: "77fb1b7e-19e7-4d04-afc7-b55138b71d95"). InnerVolumeSpecName "kube-api-access-47lll". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.025205 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/117bb1fe-06bc-4df1-82b2-901af8bb8287-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.025506 4747 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77fb1b7e-19e7-4d04-afc7-b55138b71d95-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.025516 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47lll\" (UniqueName: \"kubernetes.io/projected/77fb1b7e-19e7-4d04-afc7-b55138b71d95-kube-api-access-47lll\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.025529 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99gsm\" (UniqueName: \"kubernetes.io/projected/117bb1fe-06bc-4df1-82b2-901af8bb8287-kube-api-access-99gsm\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.255444 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.258396 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-67d8547547-ntqwl" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.414554 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerStarted","Data":"61bbd0ff212273713f72a5a99b8951b9509b3c63697d2043b59e598b4d4c20d8"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.414600 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerStarted","Data":"50a21be27c33e0a7a084f0a9673fe9fc88a750288805fd03264dfd81e5078760"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.416495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-be8f-account-create-update-62ffh" event={"ID":"117bb1fe-06bc-4df1-82b2-901af8bb8287","Type":"ContainerDied","Data":"8049aad79b47e0ade1d77727c28dad5362c814fbb8c34eabe1bcc2a9ea84370d"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.416519 4747 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="8049aad79b47e0ade1d77727c28dad5362c814fbb8c34eabe1bcc2a9ea84370d" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.416567 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-be8f-account-create-update-62ffh" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.425581 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9429-account-create-update-26wsc" event={"ID":"a9205288-ed77-41ad-8feb-a3ddbb0646ac","Type":"ContainerDied","Data":"47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.425625 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47f20fc647561196d64eb529f0c22e2201abb5e99ba9086b43e0ecc818e0b03f" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.425668 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9429-account-create-update-26wsc" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.427329 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"54560832-e0ee-4493-a567-b4a3e7ca4e8f","Type":"ContainerStarted","Data":"917ecb50c69582f27b0d5a31e0b14dbe814231525ba1ad0347b1e8629b46782d"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.433801 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wm4f5" event={"ID":"ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa","Type":"ContainerDied","Data":"01ca2e6bf3808f997f2dd8b3c2f2f86dd052ed4625ec69cfe54157c8b3b1c977"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.433837 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01ca2e6bf3808f997f2dd8b3c2f2f86dd052ed4625ec69cfe54157c8b3b1c977" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.433895 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wm4f5" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.440051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q4q4t" event={"ID":"77fb1b7e-19e7-4d04-afc7-b55138b71d95","Type":"ContainerDied","Data":"6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.440078 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f2c4d36b49964f6d9f3d570e64eb9f5cba3895bb61eb2e613e757ea2a83615f" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.440128 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q4q4t" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.467035 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-lpmrk" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.467044 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lpmrk" event={"ID":"0f897693-3330-45c3-8c0b-d0fff9970b4b","Type":"ContainerDied","Data":"1e16e8326b3e892f511bb02c7ae0a4e26b77b35067cb12eec320fc4ce9128985"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.467079 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e16e8326b3e892f511bb02c7ae0a4e26b77b35067cb12eec320fc4ce9128985" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.471868 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.472374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd8a-account-create-update-5dpcd" event={"ID":"233dd940-ae1f-48d9-acee-ba069d7a93fb","Type":"ContainerDied","Data":"f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d"} Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.472417 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2ed53c6a0c20c1ba90d292efdeed53196bc2bf5f5ee398ba9ab53571c65ae5d" Feb 02 09:13:05 crc kubenswrapper[4747]: I0202 09:13:05.600557 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.986165074 podStartE2EDuration="14.600535393s" podCreationTimestamp="2026-02-02 09:12:51 +0000 UTC" firstStartedPulling="2026-02-02 09:12:51.947206059 +0000 UTC m=+984.491544512" lastFinishedPulling="2026-02-02 09:13:04.561576388 +0000 UTC m=+997.105914831" observedRunningTime="2026-02-02 09:13:05.456853223 +0000 UTC m=+998.001191656" watchObservedRunningTime="2026-02-02 09:13:05.600535393 +0000 UTC m=+998.144873826" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.163500 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.246294 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.256409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config\") pod \"5760787c-4f47-4115-bd6d-12f036d73793\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.256704 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle\") pod \"5760787c-4f47-4115-bd6d-12f036d73793\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.256891 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnppm\" (UniqueName: \"kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm\") pod \"5760787c-4f47-4115-bd6d-12f036d73793\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.257047 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs\") pod \"5760787c-4f47-4115-bd6d-12f036d73793\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.257154 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config\") pod \"5760787c-4f47-4115-bd6d-12f036d73793\" (UID: \"5760787c-4f47-4115-bd6d-12f036d73793\") " Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.263526 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "5760787c-4f47-4115-bd6d-12f036d73793" (UID: "5760787c-4f47-4115-bd6d-12f036d73793"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.268827 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm" (OuterVolumeSpecName: "kube-api-access-gnppm") pod "5760787c-4f47-4115-bd6d-12f036d73793" (UID: "5760787c-4f47-4115-bd6d-12f036d73793"). InnerVolumeSpecName "kube-api-access-gnppm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.305701 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.317683 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config" (OuterVolumeSpecName: "config") pod "5760787c-4f47-4115-bd6d-12f036d73793" (UID: "5760787c-4f47-4115-bd6d-12f036d73793"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.325113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5760787c-4f47-4115-bd6d-12f036d73793" (UID: "5760787c-4f47-4115-bd6d-12f036d73793"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.347626 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "5760787c-4f47-4115-bd6d-12f036d73793" (UID: "5760787c-4f47-4115-bd6d-12f036d73793"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.359001 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.359040 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnppm\" (UniqueName: \"kubernetes.io/projected/5760787c-4f47-4115-bd6d-12f036d73793-kube-api-access-gnppm\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.359055 4747 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.359069 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.359081 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5760787c-4f47-4115-bd6d-12f036d73793-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.483653 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerStarted","Data":"1c4e7937411ef2e54b711a59e1455ba2ebd6fd13e7197f29f60574022cc44e74"} Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.488377 4747 generic.go:334] "Generic (PLEG): container finished" podID="5760787c-4f47-4115-bd6d-12f036d73793" containerID="540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623" exitCode=0 Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.488451 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerDied","Data":"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623"} Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.488491 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55c4f4d6c6-jtktd" event={"ID":"5760787c-4f47-4115-bd6d-12f036d73793","Type":"ContainerDied","Data":"143bc1289bd91924df2610001740ca9eced88ec813732e26ea6f5d0a300299e9"} Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.488508 4747 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/neutron-55c4f4d6c6-jtktd" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.488516 4747 scope.go:117] "RemoveContainer" containerID="3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.496062 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.523474 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.526969 4747 scope.go:117] "RemoveContainer" containerID="540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.533733 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-55c4f4d6c6-jtktd"] Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.552920 4747 scope.go:117] "RemoveContainer" containerID="3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3" Feb 02 09:13:06 crc kubenswrapper[4747]: E0202 09:13:06.553463 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3\": container with ID starting with 3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3 not found: ID does not exist" containerID="3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.553508 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3"} err="failed to get container status \"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3\": rpc error: code = NotFound desc = could not find container \"3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3\": container with ID starting with 3ad37663056f6cdf73082c1f71959f116e65e7d6181d10cfa553caf02e3bc8c3 not found: ID does not exist" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.553537 4747 scope.go:117] "RemoveContainer" containerID="540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623" Feb 02 09:13:06 crc kubenswrapper[4747]: E0202 09:13:06.554109 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623\": container with ID starting with 540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623 not found: ID does not exist" containerID="540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623" Feb 02 09:13:06 crc kubenswrapper[4747]: I0202 09:13:06.554151 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623"} err="failed to get container status \"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623\": rpc error: code = NotFound desc = could not find container \"540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623\": container with ID starting with 540b73b30ae67d025e8bcb1292692da7d170a41fcab6a1f99d56fb845f554623 not found: ID does not exist" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.282823 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-conductor-db-sync-rvtdm"] Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283521 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283539 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283556 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-httpd" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283564 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-httpd" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283576 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77fb1b7e-19e7-4d04-afc7-b55138b71d95" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283583 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="77fb1b7e-19e7-4d04-afc7-b55138b71d95" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283601 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-api" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283607 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-api" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283613 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9205288-ed77-41ad-8feb-a3ddbb0646ac" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283619 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9205288-ed77-41ad-8feb-a3ddbb0646ac" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283628 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f897693-3330-45c3-8c0b-d0fff9970b4b" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283635 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f897693-3330-45c3-8c0b-d0fff9970b4b" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283650 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117bb1fe-06bc-4df1-82b2-901af8bb8287" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283656 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="117bb1fe-06bc-4df1-82b2-901af8bb8287" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: E0202 09:13:07.283664 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233dd940-ae1f-48d9-acee-ba069d7a93fb" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283670 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="233dd940-ae1f-48d9-acee-ba069d7a93fb" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283844 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="233dd940-ae1f-48d9-acee-ba069d7a93fb" 
containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283854 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="77fb1b7e-19e7-4d04-afc7-b55138b71d95" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283865 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283873 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f897693-3330-45c3-8c0b-d0fff9970b4b" containerName="mariadb-database-create" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283880 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9205288-ed77-41ad-8feb-a3ddbb0646ac" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283894 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="117bb1fe-06bc-4df1-82b2-901af8bb8287" containerName="mariadb-account-create-update" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283906 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-httpd" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.283917 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5760787c-4f47-4115-bd6d-12f036d73793" containerName="neutron-api" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.284468 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.286176 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.286431 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9cp66" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.286621 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.309016 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rvtdm"] Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.374582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.374655 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.374695 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: 
\"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.374745 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4q9k\" (UniqueName: \"kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.476493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.476773 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.476869 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.477004 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4q9k\" (UniqueName: \"kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.482723 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.482799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.483756 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts\") pod \"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.492451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4q9k\" (UniqueName: \"kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k\") pod 
\"nova-cell0-conductor-db-sync-rvtdm\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.499384 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerStarted","Data":"3a51f8964cbca25b292aeb1e28884bf322289e65a61951cf9b185a7073b310b6"} Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.500514 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m5674" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" containerID="cri-o://a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d" gracePeriod=2 Feb 02 09:13:07 crc kubenswrapper[4747]: I0202 09:13:07.613135 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.054222 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.192468 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvdrx\" (UniqueName: \"kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx\") pod \"dc871587-061f-4dbd-8b3b-06c268e9adb0\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.192555 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content\") pod \"dc871587-061f-4dbd-8b3b-06c268e9adb0\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.192613 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities\") pod \"dc871587-061f-4dbd-8b3b-06c268e9adb0\" (UID: \"dc871587-061f-4dbd-8b3b-06c268e9adb0\") " Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.193630 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities" (OuterVolumeSpecName: "utilities") pod "dc871587-061f-4dbd-8b3b-06c268e9adb0" (UID: "dc871587-061f-4dbd-8b3b-06c268e9adb0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.199802 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx" (OuterVolumeSpecName: "kube-api-access-gvdrx") pod "dc871587-061f-4dbd-8b3b-06c268e9adb0" (UID: "dc871587-061f-4dbd-8b3b-06c268e9adb0"). InnerVolumeSpecName "kube-api-access-gvdrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:08 crc kubenswrapper[4747]: W0202 09:13:08.224891 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod742b6645_afca_42df_9aad_6b6e1e93790b.slice/crio-efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff WatchSource:0}: Error finding container efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff: Status 404 returned error can't find the container with id efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.226771 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rvtdm"] Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.295270 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvdrx\" (UniqueName: \"kubernetes.io/projected/dc871587-061f-4dbd-8b3b-06c268e9adb0-kube-api-access-gvdrx\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.295306 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.348893 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc871587-061f-4dbd-8b3b-06c268e9adb0" (UID: "dc871587-061f-4dbd-8b3b-06c268e9adb0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.354485 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5760787c-4f47-4115-bd6d-12f036d73793" path="/var/lib/kubelet/pods/5760787c-4f47-4115-bd6d-12f036d73793/volumes" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.397307 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc871587-061f-4dbd-8b3b-06c268e9adb0-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.510865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" event={"ID":"742b6645-afca-42df-9aad-6b6e1e93790b","Type":"ContainerStarted","Data":"efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff"} Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.513461 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerID="a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d" exitCode=0 Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.513513 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerDied","Data":"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d"} Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.513547 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5674" event={"ID":"dc871587-061f-4dbd-8b3b-06c268e9adb0","Type":"ContainerDied","Data":"e7ccfebaca3418990d3c1725e56b37b03cfaff70f05b26112fa9d79f51da42d3"} Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.513518 4747 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m5674" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.513571 4747 scope.go:117] "RemoveContainer" containerID="a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.533156 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-69bf4987b8-zq2rd" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.567851 4747 scope.go:117] "RemoveContainer" containerID="e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.579996 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.591977 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m5674"] Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.592210 4747 scope.go:117] "RemoveContainer" containerID="305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.664590 4747 scope.go:117] "RemoveContainer" containerID="a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d" Feb 02 09:13:08 crc kubenswrapper[4747]: E0202 09:13:08.668071 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d\": container with ID starting with a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d not found: ID does not exist" containerID="a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.668248 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d"} err="failed to get container status \"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d\": rpc error: code = NotFound desc = could not find container \"a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d\": container with ID starting with a4999bb9629281aae44f5ff504d97e69660cb3609ed8786005d4bb04c130cf9d not found: ID does not exist" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.668350 4747 scope.go:117] "RemoveContainer" containerID="e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab" Feb 02 09:13:08 crc kubenswrapper[4747]: E0202 09:13:08.670041 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab\": container with ID starting with e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab not found: ID does not exist" containerID="e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.670422 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab"} err="failed to get container status 
\"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab\": rpc error: code = NotFound desc = could not find container \"e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab\": container with ID starting with e0bf7d4bc9acd36304b74fa1fece0b1c623f322499d9253c3581ee55a48b75ab not found: ID does not exist" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.670545 4747 scope.go:117] "RemoveContainer" containerID="305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6" Feb 02 09:13:08 crc kubenswrapper[4747]: E0202 09:13:08.673130 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6\": container with ID starting with 305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6 not found: ID does not exist" containerID="305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6" Feb 02 09:13:08 crc kubenswrapper[4747]: I0202 09:13:08.673166 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6"} err="failed to get container status \"305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6\": rpc error: code = NotFound desc = could not find container \"305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6\": container with ID starting with 305118a2a78e2be8a63520fc47bd12572e34eeebd7436d7b39d443bc073647d6 not found: ID does not exist" Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526121 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerStarted","Data":"e83299fadd886c9eb3470cdd70775eae10f8c497d38526d029a42b15d9541d83"} Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526275 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-central-agent" containerID="cri-o://61bbd0ff212273713f72a5a99b8951b9509b3c63697d2043b59e598b4d4c20d8" gracePeriod=30 Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526309 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="proxy-httpd" containerID="cri-o://e83299fadd886c9eb3470cdd70775eae10f8c497d38526d029a42b15d9541d83" gracePeriod=30 Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526322 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="sg-core" containerID="cri-o://3a51f8964cbca25b292aeb1e28884bf322289e65a61951cf9b185a7073b310b6" gracePeriod=30 Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526332 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-notification-agent" containerID="cri-o://1c4e7937411ef2e54b711a59e1455ba2ebd6fd13e7197f29f60574022cc44e74" gracePeriod=30 Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.526620 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:13:09 crc kubenswrapper[4747]: I0202 09:13:09.567669 4747 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ceilometer-0" podStartSLOduration=8.17854202 podStartE2EDuration="12.567648187s" podCreationTimestamp="2026-02-02 09:12:57 +0000 UTC" firstStartedPulling="2026-02-02 09:13:04.506918325 +0000 UTC m=+997.051256758" lastFinishedPulling="2026-02-02 09:13:08.896024492 +0000 UTC m=+1001.440362925" observedRunningTime="2026-02-02 09:13:09.554794674 +0000 UTC m=+1002.099133107" watchObservedRunningTime="2026-02-02 09:13:09.567648187 +0000 UTC m=+1002.111986620" Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.368974 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" path="/var/lib/kubelet/pods/dc871587-061f-4dbd-8b3b-06c268e9adb0/volumes" Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536212 4747 generic.go:334] "Generic (PLEG): container finished" podID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerID="e83299fadd886c9eb3470cdd70775eae10f8c497d38526d029a42b15d9541d83" exitCode=0 Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536250 4747 generic.go:334] "Generic (PLEG): container finished" podID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerID="3a51f8964cbca25b292aeb1e28884bf322289e65a61951cf9b185a7073b310b6" exitCode=2 Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536261 4747 generic.go:334] "Generic (PLEG): container finished" podID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerID="1c4e7937411ef2e54b711a59e1455ba2ebd6fd13e7197f29f60574022cc44e74" exitCode=0 Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536289 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerDied","Data":"e83299fadd886c9eb3470cdd70775eae10f8c497d38526d029a42b15d9541d83"} Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerDied","Data":"3a51f8964cbca25b292aeb1e28884bf322289e65a61951cf9b185a7073b310b6"} Feb 02 09:13:10 crc kubenswrapper[4747]: I0202 09:13:10.536350 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerDied","Data":"1c4e7937411ef2e54b711a59e1455ba2ebd6fd13e7197f29f60574022cc44e74"} Feb 02 09:13:12 crc kubenswrapper[4747]: I0202 09:13:12.682241 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:13:12 crc kubenswrapper[4747]: I0202 09:13:12.689181 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-75b94989dd-wzr2t" Feb 02 09:13:12 crc kubenswrapper[4747]: I0202 09:13:12.773419 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:13:12 crc kubenswrapper[4747]: I0202 09:13:12.773656 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-56bc7d5584-2ln7s" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-log" containerID="cri-o://1ed1f71086fdf558823b3b1b837d1ccdda87cdb4bb6f84eeec5707e072c14cdc" gracePeriod=30 Feb 02 09:13:12 crc kubenswrapper[4747]: I0202 09:13:12.773785 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-56bc7d5584-2ln7s" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-api" 
containerID="cri-o://81bf19464dd98f6e2c94065a4f95f7e014cfff962b3053b15b09759f6cd0eb5b" gracePeriod=30 Feb 02 09:13:13 crc kubenswrapper[4747]: I0202 09:13:13.580682 4747 generic.go:334] "Generic (PLEG): container finished" podID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerID="1ed1f71086fdf558823b3b1b837d1ccdda87cdb4bb6f84eeec5707e072c14cdc" exitCode=143 Feb 02 09:13:13 crc kubenswrapper[4747]: I0202 09:13:13.580895 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerDied","Data":"1ed1f71086fdf558823b3b1b837d1ccdda87cdb4bb6f84eeec5707e072c14cdc"} Feb 02 09:13:14 crc kubenswrapper[4747]: I0202 09:13:14.593061 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerID="a0c42adcbe1996d24d8aebea7daa350ea0b388498a4ddae2b820374337d92f36" exitCode=137 Feb 02 09:13:14 crc kubenswrapper[4747]: I0202 09:13:14.593409 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerDied","Data":"a0c42adcbe1996d24d8aebea7daa350ea0b388498a4ddae2b820374337d92f36"} Feb 02 09:13:15 crc kubenswrapper[4747]: I0202 09:13:15.609287 4747 generic.go:334] "Generic (PLEG): container finished" podID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerID="61bbd0ff212273713f72a5a99b8951b9509b3c63697d2043b59e598b4d4c20d8" exitCode=0 Feb 02 09:13:15 crc kubenswrapper[4747]: I0202 09:13:15.609340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerDied","Data":"61bbd0ff212273713f72a5a99b8951b9509b3c63697d2043b59e598b4d4c20d8"} Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.519166 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.619362 4747 generic.go:334] "Generic (PLEG): container finished" podID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerID="81bf19464dd98f6e2c94065a4f95f7e014cfff962b3053b15b09759f6cd0eb5b" exitCode=0 Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.619442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerDied","Data":"81bf19464dd98f6e2c94065a4f95f7e014cfff962b3053b15b09759f6cd0eb5b"} Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.622887 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62df6fd9-5d5e-4d1b-98ac-023e270505cb","Type":"ContainerDied","Data":"50a21be27c33e0a7a084f0a9673fe9fc88a750288805fd03264dfd81e5078760"} Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.622956 4747 scope.go:117] "RemoveContainer" containerID="e83299fadd886c9eb3470cdd70775eae10f8c497d38526d029a42b15d9541d83" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.622988 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.625300 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.633757 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.645842 4747 scope.go:117] "RemoveContainer" containerID="3a51f8964cbca25b292aeb1e28884bf322289e65a61951cf9b185a7073b310b6" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.677428 4747 scope.go:117] "RemoveContainer" containerID="1c4e7937411ef2e54b711a59e1455ba2ebd6fd13e7197f29f60574022cc44e74" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.711654 4747 scope.go:117] "RemoveContainer" containerID="61bbd0ff212273713f72a5a99b8951b9509b3c63697d2043b59e598b4d4c20d8" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712644 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fkv9\" (UniqueName: \"kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712706 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712733 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712801 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712837 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.712950 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml\") pod \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\" (UID: \"62df6fd9-5d5e-4d1b-98ac-023e270505cb\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.713623 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.713929 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.718187 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts" (OuterVolumeSpecName: "scripts") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.718592 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9" (OuterVolumeSpecName: "kube-api-access-5fkv9") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "kube-api-access-5fkv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.744322 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.799330 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814192 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814456 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814564 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814798 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5prbt\" (UniqueName: \"kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814881 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.814994 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.815080 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.815150 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs" (OuterVolumeSpecName: "logs") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.815314 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.815418 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.815524 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.816229 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key\") pod \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\" (UID: \"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.816360 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj7gz\" (UniqueName: \"kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.816481 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.816583 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle\") pod \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\" (UID: \"3fc551d2-4d3d-4a72-91e9-5197460ca5bd\") " Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.818042 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs" (OuterVolumeSpecName: "logs") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.818388 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt" (OuterVolumeSpecName: "kube-api-access-5prbt") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "kube-api-access-5prbt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819620 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819661 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819682 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819700 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fkv9\" (UniqueName: \"kubernetes.io/projected/62df6fd9-5d5e-4d1b-98ac-023e270505cb-kube-api-access-5fkv9\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819718 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819732 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819777 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819794 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5prbt\" (UniqueName: \"kubernetes.io/projected/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-kube-api-access-5prbt\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.819809 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62df6fd9-5d5e-4d1b-98ac-023e270505cb-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.822386 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz" (OuterVolumeSpecName: "kube-api-access-vj7gz") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "kube-api-access-vj7gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.823539 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts" (OuterVolumeSpecName: "scripts") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.824516 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.838254 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data" (OuterVolumeSpecName: "config-data") pod "62df6fd9-5d5e-4d1b-98ac-023e270505cb" (UID: "62df6fd9-5d5e-4d1b-98ac-023e270505cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.856301 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data" (OuterVolumeSpecName: "config-data") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.862688 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.867996 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts" (OuterVolumeSpecName: "scripts") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.874685 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" (UID: "9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.884749 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data" (OuterVolumeSpecName: "config-data") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.889988 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920714 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920760 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920773 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920786 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920798 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920809 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920822 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920837 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62df6fd9-5d5e-4d1b-98ac-023e270505cb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920849 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj7gz\" (UniqueName: \"kubernetes.io/projected/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-kube-api-access-vj7gz\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.920860 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.930197 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.938568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3fc551d2-4d3d-4a72-91e9-5197460ca5bd" (UID: "3fc551d2-4d3d-4a72-91e9-5197460ca5bd"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.966208 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.978250 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986479 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986855 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="extract-utilities" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986874 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="extract-utilities" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-log" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986892 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-log" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986906 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-notification-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986913 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-notification-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986947 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="extract-content" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986955 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="extract-content" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986969 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986976 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.986984 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-central-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.986993 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-central-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.987003 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-api" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987010 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-api" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.987027 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="sg-core" Feb 02 09:13:16 crc kubenswrapper[4747]: 
I0202 09:13:16.987034 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="sg-core" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.987051 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987058 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.987068 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon-log" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987074 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon-log" Feb 02 09:13:16 crc kubenswrapper[4747]: E0202 09:13:16.987088 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="proxy-httpd" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987094 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="proxy-httpd" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987280 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-central-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987297 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-api" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987307 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987313 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="sg-core" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987324 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="ceilometer-notification-agent" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987534 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" containerName="proxy-httpd" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987543 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" containerName="horizon-log" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987552 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" containerName="placement-log" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.987567 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc871587-061f-4dbd-8b3b-06c268e9adb0" containerName="registry-server" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.989656 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.992014 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:13:16 crc kubenswrapper[4747]: I0202 09:13:16.992970 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.000233 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.021398 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.021657 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.021743 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfhmh\" (UniqueName: \"kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.021926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.022099 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.022226 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.022315 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.022416 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.022488 4747 reconciler_common.go:293] "Volume detached for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3fc551d2-4d3d-4a72-91e9-5197460ca5bd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.123738 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.124007 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.124167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.124356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.124778 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.124953 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.125070 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfhmh\" (UniqueName: \"kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.125858 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.127437 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.128357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts\") pod \"ceilometer-0\" 
(UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.141565 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.142576 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.143453 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.146376 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfhmh\" (UniqueName: \"kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh\") pod \"ceilometer-0\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.195037 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.195344 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-log" containerID="cri-o://9016c0851c6fdf14fa1ff74e3f1252089083a7580b71dfe23376fb41cb220426" gracePeriod=30 Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.195468 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-httpd" containerID="cri-o://09803ea36b706f383f5d28af6229736fc7052fab541590b73e91c1177bda5d40" gracePeriod=30 Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.310774 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.634752 4747 generic.go:334] "Generic (PLEG): container finished" podID="494e1957-39a0-4704-b0d3-7475cf24178b" containerID="9016c0851c6fdf14fa1ff74e3f1252089083a7580b71dfe23376fb41cb220426" exitCode=143 Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.635108 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerDied","Data":"9016c0851c6fdf14fa1ff74e3f1252089083a7580b71dfe23376fb41cb220426"} Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.637024 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56bc7d5584-2ln7s" event={"ID":"3fc551d2-4d3d-4a72-91e9-5197460ca5bd","Type":"ContainerDied","Data":"892d754c334821579b9ca20ccde85cea1e68dcb30b98ab8d2595f96970eeb1ba"} Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.637071 4747 scope.go:117] "RemoveContainer" containerID="81bf19464dd98f6e2c94065a4f95f7e014cfff962b3053b15b09759f6cd0eb5b" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.637194 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-56bc7d5584-2ln7s" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.641298 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" event={"ID":"742b6645-afca-42df-9aad-6b6e1e93790b","Type":"ContainerStarted","Data":"b177c0e96f813e1c664183cc01f48afc1955b2119c11e31e031227db4f59e861"} Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.644316 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69bf4987b8-zq2rd" event={"ID":"9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae","Type":"ContainerDied","Data":"c082ab8faf10c0263c6de1abd51fc6d573476ff026bc934eb1e51cb040fab256"} Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.644394 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-69bf4987b8-zq2rd" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.673323 4747 scope.go:117] "RemoveContainer" containerID="1ed1f71086fdf558823b3b1b837d1ccdda87cdb4bb6f84eeec5707e072c14cdc" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.694199 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" podStartSLOduration=2.673671551 podStartE2EDuration="10.694155267s" podCreationTimestamp="2026-02-02 09:13:07 +0000 UTC" firstStartedPulling="2026-02-02 09:13:08.227966627 +0000 UTC m=+1000.772305060" lastFinishedPulling="2026-02-02 09:13:16.248450343 +0000 UTC m=+1008.792788776" observedRunningTime="2026-02-02 09:13:17.673428177 +0000 UTC m=+1010.217766610" watchObservedRunningTime="2026-02-02 09:13:17.694155267 +0000 UTC m=+1010.238493720" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.695710 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.700403 4747 scope.go:117] "RemoveContainer" containerID="b58beb9bccf8377ae3190cb61f1add3bdb034c8ba31eebd73a6e7366b3cb3328" Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.707844 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-56bc7d5584-2ln7s"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.729471 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.736709 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-69bf4987b8-zq2rd"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.785318 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:17 crc kubenswrapper[4747]: I0202 09:13:17.876669 4747 scope.go:117] "RemoveContainer" containerID="a0c42adcbe1996d24d8aebea7daa350ea0b388498a4ddae2b820374337d92f36" Feb 02 09:13:17 crc kubenswrapper[4747]: W0202 09:13:17.882605 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e5ce8ec_4180_402e_b931_a24bb7f46fc7.slice/crio-272b00933507964b260fac3691868b14db11fbca40cbbefa1c3d8094d2776b68 WatchSource:0}: Error finding container 272b00933507964b260fac3691868b14db11fbca40cbbefa1c3d8094d2776b68: Status 404 returned error can't find the container with id 272b00933507964b260fac3691868b14db11fbca40cbbefa1c3d8094d2776b68 Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.356379 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fc551d2-4d3d-4a72-91e9-5197460ca5bd" path="/var/lib/kubelet/pods/3fc551d2-4d3d-4a72-91e9-5197460ca5bd/volumes" Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.358368 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62df6fd9-5d5e-4d1b-98ac-023e270505cb" path="/var/lib/kubelet/pods/62df6fd9-5d5e-4d1b-98ac-023e270505cb/volumes" Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.360716 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae" path="/var/lib/kubelet/pods/9e1bfdd3-f021-48b0-bf1e-98c03a5d99ae/volumes" Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.516160 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.516400 4747 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-log" containerID="cri-o://0c10e955cfb68a859c2eab061a117c62ec17f54ace01c3558062a140c06db725" gracePeriod=30 Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.516500 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-httpd" containerID="cri-o://7f3b1a103d81886ff14d2f4ae28b712738e8be05cf6adf32319381795cead84b" gracePeriod=30 Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.687357 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerStarted","Data":"272b00933507964b260fac3691868b14db11fbca40cbbefa1c3d8094d2776b68"} Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.700245 4747 generic.go:334] "Generic (PLEG): container finished" podID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerID="0c10e955cfb68a859c2eab061a117c62ec17f54ace01c3558062a140c06db725" exitCode=143 Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.700334 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerDied","Data":"0c10e955cfb68a859c2eab061a117c62ec17f54ace01c3558062a140c06db725"} Feb 02 09:13:18 crc kubenswrapper[4747]: I0202 09:13:18.815906 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:19 crc kubenswrapper[4747]: I0202 09:13:19.718029 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerStarted","Data":"8ed993d8e7466c4f036c43baa4fe684ae318bfd4d3c4a998fca48ef178b0579c"} Feb 02 09:13:19 crc kubenswrapper[4747]: I0202 09:13:19.718400 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerStarted","Data":"f1d440a5575d6bc6d5b9cf17a8bd2992628327a95e7b020f7e22fecd89758377"} Feb 02 09:13:20 crc kubenswrapper[4747]: I0202 09:13:20.765644 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerStarted","Data":"20f1540204f3a666b3236176863ff53cbc5d36ec09df7d5703e1e1cdce518adc"} Feb 02 09:13:20 crc kubenswrapper[4747]: I0202 09:13:20.767816 4747 generic.go:334] "Generic (PLEG): container finished" podID="494e1957-39a0-4704-b0d3-7475cf24178b" containerID="09803ea36b706f383f5d28af6229736fc7052fab541590b73e91c1177bda5d40" exitCode=0 Feb 02 09:13:20 crc kubenswrapper[4747]: I0202 09:13:20.767853 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerDied","Data":"09803ea36b706f383f5d28af6229736fc7052fab541590b73e91c1177bda5d40"} Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.078960 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.198670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.198728 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.198854 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.198891 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqp6p\" (UniqueName: \"kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.198909 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.199002 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.199071 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.199112 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run\") pod \"494e1957-39a0-4704-b0d3-7475cf24178b\" (UID: \"494e1957-39a0-4704-b0d3-7475cf24178b\") " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.199909 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.200447 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs" (OuterVolumeSpecName: "logs") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.206186 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p" (OuterVolumeSpecName: "kube-api-access-hqp6p") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "kube-api-access-hqp6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.221129 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts" (OuterVolumeSpecName: "scripts") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.235819 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.264546 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.268068 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data" (OuterVolumeSpecName: "config-data") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.299472 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "494e1957-39a0-4704-b0d3-7475cf24178b" (UID: "494e1957-39a0-4704-b0d3-7475cf24178b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300715 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300738 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300746 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300761 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/494e1957-39a0-4704-b0d3-7475cf24178b-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300769 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300779 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqp6p\" (UniqueName: \"kubernetes.io/projected/494e1957-39a0-4704-b0d3-7475cf24178b-kube-api-access-hqp6p\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300788 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.300796 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/494e1957-39a0-4704-b0d3-7475cf24178b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.322548 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.402955 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.778983 4747 generic.go:334] "Generic (PLEG): container finished" podID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerID="7f3b1a103d81886ff14d2f4ae28b712738e8be05cf6adf32319381795cead84b" exitCode=0 Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.779062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerDied","Data":"7f3b1a103d81886ff14d2f4ae28b712738e8be05cf6adf32319381795cead84b"} Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.781551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"494e1957-39a0-4704-b0d3-7475cf24178b","Type":"ContainerDied","Data":"e088edf1f2464df10d6c55b3893d4c795a1b08e4ac79d509b47b0738e9afe7d5"} Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 
09:13:21.781594 4747 scope.go:117] "RemoveContainer" containerID="09803ea36b706f383f5d28af6229736fc7052fab541590b73e91c1177bda5d40" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.781631 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.824095 4747 scope.go:117] "RemoveContainer" containerID="9016c0851c6fdf14fa1ff74e3f1252089083a7580b71dfe23376fb41cb220426" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.831155 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.848541 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.859426 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:21 crc kubenswrapper[4747]: E0202 09:13:21.859857 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-httpd" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.859873 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-httpd" Feb 02 09:13:21 crc kubenswrapper[4747]: E0202 09:13:21.859915 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-log" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.859923 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-log" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.860166 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-httpd" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.860191 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" containerName="glance-log" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.861413 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.865439 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.872267 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 09:13:21 crc kubenswrapper[4747]: I0202 09:13:21.875444 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012407 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012465 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-config-data\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012523 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-logs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-scripts\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012649 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csd2h\" (UniqueName: \"kubernetes.io/projected/bba7d405-55c1-4fd1-91ba-d1a235f09160-kube-api-access-csd2h\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.012701 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.113856 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csd2h\" (UniqueName: \"kubernetes.io/projected/bba7d405-55c1-4fd1-91ba-d1a235f09160-kube-api-access-csd2h\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.113914 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.113987 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114043 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114101 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-config-data\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114143 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-logs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114176 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-scripts\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114203 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.114911 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.115317 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.115331 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bba7d405-55c1-4fd1-91ba-d1a235f09160-logs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.124709 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-scripts\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.134204 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-config-data\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.137842 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csd2h\" (UniqueName: \"kubernetes.io/projected/bba7d405-55c1-4fd1-91ba-d1a235f09160-kube-api-access-csd2h\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.155872 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.164529 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bba7d405-55c1-4fd1-91ba-d1a235f09160-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.217653 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bba7d405-55c1-4fd1-91ba-d1a235f09160\") " pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.286044 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.367440 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="494e1957-39a0-4704-b0d3-7475cf24178b" path="/var/lib/kubelet/pods/494e1957-39a0-4704-b0d3-7475cf24178b/volumes" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.419839 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.419924 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420014 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc5nj\" (UniqueName: \"kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420045 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420096 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420111 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420203 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.420247 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs\") pod \"cfce7950-bdca-4938-9579-4abf3357d9f0\" (UID: \"cfce7950-bdca-4938-9579-4abf3357d9f0\") " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.430457 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.430548 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs" (OuterVolumeSpecName: "logs") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.434031 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.434358 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts" (OuterVolumeSpecName: "scripts") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.441048 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj" (OuterVolumeSpecName: "kube-api-access-fc5nj") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "kube-api-access-fc5nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.453925 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.483076 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.488606 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.512113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data" (OuterVolumeSpecName: "config-data") pod "cfce7950-bdca-4938-9579-4abf3357d9f0" (UID: "cfce7950-bdca-4938-9579-4abf3357d9f0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522317 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522366 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522383 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522397 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522408 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc5nj\" (UniqueName: \"kubernetes.io/projected/cfce7950-bdca-4938-9579-4abf3357d9f0-kube-api-access-fc5nj\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522418 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfce7950-bdca-4938-9579-4abf3357d9f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522427 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfce7950-bdca-4938-9579-4abf3357d9f0-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.522463 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.550468 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.627393 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.791035 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cfce7950-bdca-4938-9579-4abf3357d9f0","Type":"ContainerDied","Data":"9f386dfe6401ee5844422d788ecab18505eeddf2662f5fb0ed36df96c395d832"} Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.791329 4747 scope.go:117] "RemoveContainer" containerID="7f3b1a103d81886ff14d2f4ae28b712738e8be05cf6adf32319381795cead84b" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.791097 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803173 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerStarted","Data":"a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153"} Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803348 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-central-agent" containerID="cri-o://f1d440a5575d6bc6d5b9cf17a8bd2992628327a95e7b020f7e22fecd89758377" gracePeriod=30 Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803502 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803648 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="proxy-httpd" containerID="cri-o://a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153" gracePeriod=30 Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803798 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-notification-agent" containerID="cri-o://8ed993d8e7466c4f036c43baa4fe684ae318bfd4d3c4a998fca48ef178b0579c" gracePeriod=30 Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.803844 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="sg-core" containerID="cri-o://20f1540204f3a666b3236176863ff53cbc5d36ec09df7d5703e1e1cdce518adc" gracePeriod=30 Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.843078 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.338237441 podStartE2EDuration="6.843053615s" podCreationTimestamp="2026-02-02 09:13:16 +0000 UTC" firstStartedPulling="2026-02-02 09:13:17.885103435 +0000 UTC m=+1010.429441868" lastFinishedPulling="2026-02-02 09:13:22.389919609 +0000 UTC m=+1014.934258042" observedRunningTime="2026-02-02 09:13:22.835416833 +0000 UTC m=+1015.379755276" watchObservedRunningTime="2026-02-02 09:13:22.843053615 +0000 UTC m=+1015.387392048" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.857474 4747 scope.go:117] "RemoveContainer" containerID="0c10e955cfb68a859c2eab061a117c62ec17f54ace01c3558062a140c06db725" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.881308 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.887925 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.906488 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:22 crc kubenswrapper[4747]: E0202 09:13:22.906949 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-log" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.906970 4747 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-log" Feb 02 09:13:22 crc kubenswrapper[4747]: E0202 09:13:22.907014 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-httpd" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.907025 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-httpd" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.907247 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-httpd" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.907294 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" containerName="glance-log" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.908407 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.913440 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.913657 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 09:13:22 crc kubenswrapper[4747]: I0202 09:13:22.920736 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036075 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-logs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036169 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w89l8\" (UniqueName: \"kubernetes.io/projected/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-kube-api-access-w89l8\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036326 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " 
pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036354 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036412 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.036442 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.057652 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138717 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w89l8\" (UniqueName: \"kubernetes.io/projected/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-kube-api-access-w89l8\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138779 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138804 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138822 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138853 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138917 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.138983 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-logs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.139239 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.139715 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.140751 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-logs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.147779 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.160545 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.160768 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.161273 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.176778 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w89l8\" (UniqueName: \"kubernetes.io/projected/d6b158e0-57fd-41f8-8d2d-462d6da18ab0-kube-api-access-w89l8\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.263758 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d6b158e0-57fd-41f8-8d2d-462d6da18ab0\") " pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.285532 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.844230 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerID="20f1540204f3a666b3236176863ff53cbc5d36ec09df7d5703e1e1cdce518adc" exitCode=2 Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.844754 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerID="8ed993d8e7466c4f036c43baa4fe684ae318bfd4d3c4a998fca48ef178b0579c" exitCode=0 Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.844837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerDied","Data":"20f1540204f3a666b3236176863ff53cbc5d36ec09df7d5703e1e1cdce518adc"} Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.844870 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerDied","Data":"8ed993d8e7466c4f036c43baa4fe684ae318bfd4d3c4a998fca48ef178b0579c"} Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.855429 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bba7d405-55c1-4fd1-91ba-d1a235f09160","Type":"ContainerStarted","Data":"fa8fa028affe8b17f84e02ee3610d35227a06178d8488b2adc8bafc1fb424fd3"} Feb 02 09:13:23 crc kubenswrapper[4747]: I0202 09:13:23.891609 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 09:13:24.383424 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfce7950-bdca-4938-9579-4abf3357d9f0" path="/var/lib/kubelet/pods/cfce7950-bdca-4938-9579-4abf3357d9f0/volumes" Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 09:13:24.877065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d6b158e0-57fd-41f8-8d2d-462d6da18ab0","Type":"ContainerStarted","Data":"e0ae7251a6750fd4c525d716c85202c061a75f00c060d886d5049e6d79a1b8c2"} Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 09:13:24.877126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d6b158e0-57fd-41f8-8d2d-462d6da18ab0","Type":"ContainerStarted","Data":"124beff9563a50b66effc0316129db79cea1a93691439d5beedcd6ece3144ac9"} Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 
09:13:24.878980 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bba7d405-55c1-4fd1-91ba-d1a235f09160","Type":"ContainerStarted","Data":"a4bb4916fc96aefee75a45777450598c38c25705ad88ce5fb7505588af60a3e4"} Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 09:13:24.879014 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bba7d405-55c1-4fd1-91ba-d1a235f09160","Type":"ContainerStarted","Data":"66d6b8cf6a372675f9faa591e640ac6e923d5eaa8ea0b166c1ec00f066543d30"} Feb 02 09:13:24 crc kubenswrapper[4747]: I0202 09:13:24.912866 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.912843659 podStartE2EDuration="3.912843659s" podCreationTimestamp="2026-02-02 09:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:24.903339901 +0000 UTC m=+1017.447678334" watchObservedRunningTime="2026-02-02 09:13:24.912843659 +0000 UTC m=+1017.457182102" Feb 02 09:13:25 crc kubenswrapper[4747]: I0202 09:13:25.891567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d6b158e0-57fd-41f8-8d2d-462d6da18ab0","Type":"ContainerStarted","Data":"08aef3c6c080d0043d7e9a3fd22eff4069fe5bdc2d8055ce966876e6e0dd70b6"} Feb 02 09:13:25 crc kubenswrapper[4747]: I0202 09:13:25.918317 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.918299021 podStartE2EDuration="3.918299021s" podCreationTimestamp="2026-02-02 09:13:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:25.914743232 +0000 UTC m=+1018.459081665" watchObservedRunningTime="2026-02-02 09:13:25.918299021 +0000 UTC m=+1018.462637454" Feb 02 09:13:27 crc kubenswrapper[4747]: I0202 09:13:27.908172 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerID="f1d440a5575d6bc6d5b9cf17a8bd2992628327a95e7b020f7e22fecd89758377" exitCode=0 Feb 02 09:13:27 crc kubenswrapper[4747]: I0202 09:13:27.908237 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerDied","Data":"f1d440a5575d6bc6d5b9cf17a8bd2992628327a95e7b020f7e22fecd89758377"} Feb 02 09:13:28 crc kubenswrapper[4747]: I0202 09:13:28.920381 4747 generic.go:334] "Generic (PLEG): container finished" podID="742b6645-afca-42df-9aad-6b6e1e93790b" containerID="b177c0e96f813e1c664183cc01f48afc1955b2119c11e31e031227db4f59e861" exitCode=0 Feb 02 09:13:28 crc kubenswrapper[4747]: I0202 09:13:28.920716 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" event={"ID":"742b6645-afca-42df-9aad-6b6e1e93790b","Type":"ContainerDied","Data":"b177c0e96f813e1c664183cc01f48afc1955b2119c11e31e031227db4f59e861"} Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.296654 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.388964 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle\") pod \"742b6645-afca-42df-9aad-6b6e1e93790b\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.389024 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts\") pod \"742b6645-afca-42df-9aad-6b6e1e93790b\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.389059 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4q9k\" (UniqueName: \"kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k\") pod \"742b6645-afca-42df-9aad-6b6e1e93790b\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.389157 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data\") pod \"742b6645-afca-42df-9aad-6b6e1e93790b\" (UID: \"742b6645-afca-42df-9aad-6b6e1e93790b\") " Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.397560 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k" (OuterVolumeSpecName: "kube-api-access-p4q9k") pod "742b6645-afca-42df-9aad-6b6e1e93790b" (UID: "742b6645-afca-42df-9aad-6b6e1e93790b"). InnerVolumeSpecName "kube-api-access-p4q9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.406132 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts" (OuterVolumeSpecName: "scripts") pod "742b6645-afca-42df-9aad-6b6e1e93790b" (UID: "742b6645-afca-42df-9aad-6b6e1e93790b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.421724 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data" (OuterVolumeSpecName: "config-data") pod "742b6645-afca-42df-9aad-6b6e1e93790b" (UID: "742b6645-afca-42df-9aad-6b6e1e93790b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.425043 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "742b6645-afca-42df-9aad-6b6e1e93790b" (UID: "742b6645-afca-42df-9aad-6b6e1e93790b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.491630 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.491678 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.491692 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4q9k\" (UniqueName: \"kubernetes.io/projected/742b6645-afca-42df-9aad-6b6e1e93790b-kube-api-access-p4q9k\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.491706 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742b6645-afca-42df-9aad-6b6e1e93790b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.943030 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.943077 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rvtdm" event={"ID":"742b6645-afca-42df-9aad-6b6e1e93790b","Type":"ContainerDied","Data":"efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff"} Feb 02 09:13:30 crc kubenswrapper[4747]: I0202 09:13:30.943131 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efc9c61cd5d16118397665b2c51a9c249f23ef97e68a25f1441823f6e93859ff" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.059042 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:31 crc kubenswrapper[4747]: E0202 09:13:31.059490 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742b6645-afca-42df-9aad-6b6e1e93790b" containerName="nova-cell0-conductor-db-sync" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.059518 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="742b6645-afca-42df-9aad-6b6e1e93790b" containerName="nova-cell0-conductor-db-sync" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.059750 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="742b6645-afca-42df-9aad-6b6e1e93790b" containerName="nova-cell0-conductor-db-sync" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.060520 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.062521 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9cp66" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.068623 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.077367 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.203691 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45tsg\" (UniqueName: \"kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.204208 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.204377 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.305880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.305957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45tsg\" (UniqueName: \"kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.306053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.311695 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.317658 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.324205 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45tsg\" (UniqueName: \"kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg\") pod \"nova-cell0-conductor-0\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.378423 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.817687 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:31 crc kubenswrapper[4747]: I0202 09:13:31.964169 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c","Type":"ContainerStarted","Data":"04030f68f2fcb4b2ab6b6dee11c3a9bab5361b4702ccbb296e23fd12e00a4221"} Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.489638 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.490365 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.527213 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.537729 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.972540 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 09:13:32 crc kubenswrapper[4747]: I0202 09:13:32.972856 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.286737 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.286816 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.329359 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.342522 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.981192 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:33 crc kubenswrapper[4747]: I0202 09:13:33.981519 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:34 crc kubenswrapper[4747]: I0202 09:13:34.863534 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 09:13:34 crc kubenswrapper[4747]: I0202 09:13:34.902736 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 09:13:35 crc kubenswrapper[4747]: I0202 09:13:35.014833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c","Type":"ContainerStarted","Data":"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161"} Feb 02 09:13:35 crc kubenswrapper[4747]: I0202 09:13:35.015010 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:35 crc kubenswrapper[4747]: I0202 09:13:35.045551 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=4.045524006 podStartE2EDuration="4.045524006s" podCreationTimestamp="2026-02-02 09:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:35.031537864 +0000 UTC m=+1027.575876307" watchObservedRunningTime="2026-02-02 09:13:35.045524006 +0000 UTC m=+1027.589862449" Feb 02 09:13:35 crc kubenswrapper[4747]: I0202 09:13:35.359530 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:36 crc kubenswrapper[4747]: I0202 09:13:36.132815 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:36 crc kubenswrapper[4747]: I0202 09:13:36.132961 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 09:13:36 crc kubenswrapper[4747]: I0202 09:13:36.141770 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.033577 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" containerName="nova-cell0-conductor-conductor" containerID="cri-o://055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161" gracePeriod=30 Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.846896 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.989912 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle\") pod \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.990065 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data\") pod \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.990274 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45tsg\" (UniqueName: \"kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg\") pod \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\" (UID: \"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c\") " Feb 02 09:13:37 crc kubenswrapper[4747]: I0202 09:13:37.995815 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg" (OuterVolumeSpecName: "kube-api-access-45tsg") pod "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" (UID: "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c"). InnerVolumeSpecName "kube-api-access-45tsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.023183 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" (UID: "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.031762 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data" (OuterVolumeSpecName: "config-data") pod "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" (UID: "d18eb81e-dc14-4b9b-99ed-0b7e7baff05c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.048545 4747 generic.go:334] "Generic (PLEG): container finished" podID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" containerID="055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161" exitCode=0 Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.048595 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c","Type":"ContainerDied","Data":"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161"} Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.048627 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d18eb81e-dc14-4b9b-99ed-0b7e7baff05c","Type":"ContainerDied","Data":"04030f68f2fcb4b2ab6b6dee11c3a9bab5361b4702ccbb296e23fd12e00a4221"} Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.048648 4747 scope.go:117] "RemoveContainer" containerID="055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.048784 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.093331 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.093378 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.093396 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45tsg\" (UniqueName: \"kubernetes.io/projected/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c-kube-api-access-45tsg\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.127566 4747 scope.go:117] "RemoveContainer" containerID="055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161" Feb 02 09:13:38 crc kubenswrapper[4747]: E0202 09:13:38.128099 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161\": container with ID starting with 055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161 not found: ID does not exist" containerID="055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.128186 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161"} err="failed to get container status \"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161\": rpc error: code = NotFound desc = could not find container \"055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161\": container with ID starting with 055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161 not found: ID does not exist" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.131386 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 
09:13:38.148098 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.159818 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:38 crc kubenswrapper[4747]: E0202 09:13:38.162230 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" containerName="nova-cell0-conductor-conductor" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.162278 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" containerName="nova-cell0-conductor-conductor" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.164537 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" containerName="nova-cell0-conductor-conductor" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.165988 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.172550 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.172768 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9cp66" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.213281 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.299523 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.299600 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.300073 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzmsq\" (UniqueName: \"kubernetes.io/projected/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-kube-api-access-dzmsq\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.351283 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d18eb81e-dc14-4b9b-99ed-0b7e7baff05c" path="/var/lib/kubelet/pods/d18eb81e-dc14-4b9b-99ed-0b7e7baff05c/volumes" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.402225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.402295 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.402424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzmsq\" (UniqueName: \"kubernetes.io/projected/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-kube-api-access-dzmsq\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.407027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.409509 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.418110 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzmsq\" (UniqueName: \"kubernetes.io/projected/4be5b7d2-85d1-4f15-bcc3-991a97f5fcec-kube-api-access-dzmsq\") pod \"nova-cell0-conductor-0\" (UID: \"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec\") " pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.491488 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:38 crc kubenswrapper[4747]: I0202 09:13:38.835057 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 02 09:13:39 crc kubenswrapper[4747]: I0202 09:13:39.061871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec","Type":"ContainerStarted","Data":"a70a0b8a995e1b5f1f53cfe2357a5a5a13675fa7353ac0165e8cf30bfb5425c0"} Feb 02 09:13:39 crc kubenswrapper[4747]: I0202 09:13:39.061924 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4be5b7d2-85d1-4f15-bcc3-991a97f5fcec","Type":"ContainerStarted","Data":"3cdc1e062fc1053e23cadc8c1e6db4b1a9eda2b01a50274fb40b499ecc128ead"} Feb 02 09:13:39 crc kubenswrapper[4747]: I0202 09:13:39.062092 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:39 crc kubenswrapper[4747]: I0202 09:13:39.084349 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.084330462 podStartE2EDuration="1.084330462s" podCreationTimestamp="2026-02-02 09:13:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:39.082488476 +0000 UTC m=+1031.626826909" watchObservedRunningTime="2026-02-02 09:13:39.084330462 +0000 UTC m=+1031.628668895" Feb 02 09:13:47 crc kubenswrapper[4747]: I0202 09:13:47.315609 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 02 09:13:48 crc kubenswrapper[4747]: I0202 09:13:48.523212 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.042852 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-qqtqd"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.044320 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.048588 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.048986 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.054905 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qqtqd"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.226962 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.228681 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.231688 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.232107 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.232150 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.232242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lthl\" (UniqueName: \"kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.232277 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.240405 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.284496 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.324568 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.332739 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344349 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lthl\" (UniqueName: \"kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344476 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344511 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g9nr\" (UniqueName: \"kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344564 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344654 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344702 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.344845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.348479 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.372419 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.395773 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.406922 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.408963 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.414230 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.421474 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.422409 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lthl\" (UniqueName: \"kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl\") pod \"nova-cell0-cell-mapping-qqtqd\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.431261 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.448436 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.449640 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.454343 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456034 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456103 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g9nr\" (UniqueName: \"kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456140 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwls5\" (UniqueName: \"kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456171 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456233 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.456396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.463748 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.466090 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.467643 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.467670 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.478348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.479116 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g9nr\" (UniqueName: \"kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr\") pod \"nova-api-0\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.500033 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.509351 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562062 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562134 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562159 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562190 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562220 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562298 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562361 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562438 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562529 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562600 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwls5\" (UniqueName: \"kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z8cx\" (UniqueName: \"kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562721 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpl8q\" (UniqueName: 
\"kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562749 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.562793 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4xdn\" (UniqueName: \"kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.569178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.571438 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.584394 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwls5\" (UniqueName: \"kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5\") pod \"nova-cell1-novncproxy-0\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.654481 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664487 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z8cx\" (UniqueName: \"kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664600 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpl8q\" (UniqueName: \"kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664635 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4xdn\" (UniqueName: \"kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664686 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664720 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664757 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664775 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664801 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.664834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.666021 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.666136 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.666278 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.666379 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.666571 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.667162 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " 
pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.669107 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.671554 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.671791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.672457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.680078 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.683691 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z8cx\" (UniqueName: \"kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx\") pod \"nova-metadata-0\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.687044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpl8q\" (UniqueName: \"kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q\") pod \"nova-scheduler-0\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.691192 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4xdn\" (UniqueName: \"kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn\") pod \"dnsmasq-dns-845d6d6f59-65dxk\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.852240 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.911763 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.929413 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:13:49 crc kubenswrapper[4747]: I0202 09:13:49.937790 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.246385 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.256177 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qqtqd"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.417376 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j5llb"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.418784 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.421545 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.421800 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.427183 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j5llb"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.501805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.519308 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.574614 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.581779 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lspd\" (UniqueName: \"kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.581910 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.582000 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.582030 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.589200 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-scheduler-0"] Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.683299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.683572 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.683596 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.683667 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lspd\" (UniqueName: \"kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.690506 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.690596 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.691152 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.704681 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lspd\" (UniqueName: \"kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd\") pod \"nova-cell1-conductor-db-sync-j5llb\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:50 crc kubenswrapper[4747]: I0202 09:13:50.771032 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.182119 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15a70c7a-5b74-4348-93e8-2b204ca16d11","Type":"ContainerStarted","Data":"0fd68ee5813705ebc7c2ce65c77d1f75092219235706a13dab1188563667eafe"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.186126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerStarted","Data":"c0b367c345f0ec2fa8712826952568c18951be6b85b6724d16442fa4edafe57e"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.189751 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d2182862-d41c-4cc6-a47f-d67738c10793","Type":"ContainerStarted","Data":"a2d00ef009db0de942cd4776aacce9f51c8c1c7622ee8dbb95ebd86397e3684b"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.191830 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerStarted","Data":"bfd8fc92d0ef9095922919e0c9f373c46ab2dc485db19d2d029d85b8bbcbd029"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.193314 4747 generic.go:334] "Generic (PLEG): container finished" podID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerID="5fa6f58e477044f50a64a1e70f639861cd2a105fa9fede5a8869f21882226100" exitCode=0 Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.193921 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" event={"ID":"a32c4798-c1f9-4856-a256-7c28fafa04af","Type":"ContainerDied","Data":"5fa6f58e477044f50a64a1e70f639861cd2a105fa9fede5a8869f21882226100"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.193960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" event={"ID":"a32c4798-c1f9-4856-a256-7c28fafa04af","Type":"ContainerStarted","Data":"88ef9debc36544aacf72d443669c28d81dfefb9064d28cf06a0ed96d773ef5fd"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.202481 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qqtqd" event={"ID":"cc167b60-b071-4a53-9cac-27a8dd516321","Type":"ContainerStarted","Data":"40aba8b534e1c3a42cb1565e2ed91ffcebe9f7b07ef15fa63c9a312e131e2d94"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.202521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qqtqd" event={"ID":"cc167b60-b071-4a53-9cac-27a8dd516321","Type":"ContainerStarted","Data":"5b2f0a652c6da96e7514f14f22f23be68ce2ca68ad456c82ef992cc60e48cd9d"} Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.248976 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-qqtqd" podStartSLOduration=2.248954974 podStartE2EDuration="2.248954974s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:51.231452223 +0000 UTC m=+1043.775790656" watchObservedRunningTime="2026-02-02 09:13:51.248954974 +0000 UTC m=+1043.793293407" Feb 02 09:13:51 crc kubenswrapper[4747]: I0202 09:13:51.455275 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j5llb"] Feb 02 
09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.213124 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" event={"ID":"a32c4798-c1f9-4856-a256-7c28fafa04af","Type":"ContainerStarted","Data":"815c772183b22ce0317b076c24ed835a1dc87da689f83d13ab6bb2efa2c3f06a"} Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.213449 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.215530 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j5llb" event={"ID":"4675701d-dd13-435f-ada3-53023492cb1e","Type":"ContainerStarted","Data":"d684bb3a72710f3d15448c93b8d1b0f0fc98cdda20a2cf9c1c5b4401db9e1592"} Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.215576 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j5llb" event={"ID":"4675701d-dd13-435f-ada3-53023492cb1e","Type":"ContainerStarted","Data":"ae3fbf14e9091857aec88acd1201582e34191f824ac3931ef9f6aeca84675ce5"} Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.241364 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" podStartSLOduration=3.241343407 podStartE2EDuration="3.241343407s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:52.232343325 +0000 UTC m=+1044.776681758" watchObservedRunningTime="2026-02-02 09:13:52.241343407 +0000 UTC m=+1044.785681840" Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.244602 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-j5llb" podStartSLOduration=2.244589061 podStartE2EDuration="2.244589061s" podCreationTimestamp="2026-02-02 09:13:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:52.244109408 +0000 UTC m=+1044.788447841" watchObservedRunningTime="2026-02-02 09:13:52.244589061 +0000 UTC m=+1044.788927494" Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.642699 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:52 crc kubenswrapper[4747]: I0202 09:13:52.654627 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:13:52 crc kubenswrapper[4747]: W0202 09:13:52.863769 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd18eb81e_dc14_4b9b_99ed_0b7e7baff05c.slice/crio-055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161.scope WatchSource:0}: Error finding container 055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161: Status 404 returned error can't find the container with id 055aae84c9ba4c8c164ae0453731b60e12a2901c79677e31a2a9ab6da56e8161 Feb 02 09:13:53 crc kubenswrapper[4747]: E0202 09:13:53.123059 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e5ce8ec_4180_402e_b931_a24bb7f46fc7.slice/crio-a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e5ce8ec_4180_402e_b931_a24bb7f46fc7.slice/crio-conmon-a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153.scope\": RecentStats: unable to find data in memory cache]" Feb 02 09:13:53 crc kubenswrapper[4747]: I0202 09:13:53.229012 4747 generic.go:334] "Generic (PLEG): container finished" podID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerID="a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153" exitCode=137 Feb 02 09:13:53 crc kubenswrapper[4747]: I0202 09:13:53.229113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerDied","Data":"a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153"} Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.182583 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.293687 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerStarted","Data":"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82"} Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.305442 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="d2182862-d41c-4cc6-a47f-d67738c10793" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://680fc39bdc18300762da33ddc18b525ece6d207ed4ea281091cc9ae502680fbe" gracePeriod=30 Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.324947 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e5ce8ec-4180-402e-b931-a24bb7f46fc7","Type":"ContainerDied","Data":"272b00933507964b260fac3691868b14db11fbca40cbbefa1c3d8094d2776b68"} Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.325006 4747 scope.go:117] "RemoveContainer" containerID="a5929a36b1e333d66e50587da5e9c2810b2c1d81eb25452c13d5268758906153" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.325347 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.338545 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.107268036 podStartE2EDuration="5.338521007s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="2026-02-02 09:13:50.488979406 +0000 UTC m=+1043.033317839" lastFinishedPulling="2026-02-02 09:13:53.720232377 +0000 UTC m=+1046.264570810" observedRunningTime="2026-02-02 09:13:54.33128618 +0000 UTC m=+1046.875624623" watchObservedRunningTime="2026-02-02 09:13:54.338521007 +0000 UTC m=+1046.882859440" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.361188 4747 scope.go:117] "RemoveContainer" containerID="20f1540204f3a666b3236176863ff53cbc5d36ec09df7d5703e1e1cdce518adc" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.379901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.379983 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.380031 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.380065 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.380136 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfhmh\" (UniqueName: \"kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.380202 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.380233 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml\") pod \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\" (UID: \"9e5ce8ec-4180-402e-b931-a24bb7f46fc7\") " Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.387800 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd" (OuterVolumeSpecName: "log-httpd") 
pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.388040 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts" (OuterVolumeSpecName: "scripts") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.388653 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh" (OuterVolumeSpecName: "kube-api-access-gfhmh") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "kube-api-access-gfhmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.388775 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.405311 4747 scope.go:117] "RemoveContainer" containerID="8ed993d8e7466c4f036c43baa4fe684ae318bfd4d3c4a998fca48ef178b0579c" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.430559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.446895 4747 scope.go:117] "RemoveContainer" containerID="f1d440a5575d6bc6d5b9cf17a8bd2992628327a95e7b020f7e22fecd89758377" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.483486 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.483520 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.483529 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfhmh\" (UniqueName: \"kubernetes.io/projected/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-kube-api-access-gfhmh\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.483537 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.483548 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.553065 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.561877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data" (OuterVolumeSpecName: "config-data") pod "9e5ce8ec-4180-402e-b931-a24bb7f46fc7" (UID: "9e5ce8ec-4180-402e-b931-a24bb7f46fc7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.585466 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.585516 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5ce8ec-4180-402e-b931-a24bb7f46fc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.664774 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.676295 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.688824 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:54 crc kubenswrapper[4747]: E0202 09:13:54.689379 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-central-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.689452 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-central-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: E0202 09:13:54.689543 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="proxy-httpd" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.689596 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="proxy-httpd" Feb 02 09:13:54 crc kubenswrapper[4747]: E0202 09:13:54.689661 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="sg-core" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.689731 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="sg-core" Feb 02 09:13:54 crc kubenswrapper[4747]: E0202 09:13:54.689802 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-notification-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.689869 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-notification-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.690096 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="proxy-httpd" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.690171 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-central-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.690232 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="ceilometer-notification-agent" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.690295 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" containerName="sg-core" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.692689 4747 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.698485 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.698948 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.706977 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.788839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.788889 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhp5k\" (UniqueName: \"kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.788924 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.789078 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.789115 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.789141 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.789162 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.852470 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.896218 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.896811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.896735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.897115 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.897617 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhp5k\" (UniqueName: \"kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.898246 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.898419 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.898915 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.899063 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.902062 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.902760 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.906696 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.907411 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:54 crc kubenswrapper[4747]: I0202 09:13:54.922285 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhp5k\" (UniqueName: \"kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k\") pod \"ceilometer-0\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " pod="openstack/ceilometer-0" Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.096951 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.347816 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15a70c7a-5b74-4348-93e8-2b204ca16d11","Type":"ContainerStarted","Data":"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117"} Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.356222 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerStarted","Data":"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681"} Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.366852 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d2182862-d41c-4cc6-a47f-d67738c10793","Type":"ContainerStarted","Data":"680fc39bdc18300762da33ddc18b525ece6d207ed4ea281091cc9ae502680fbe"} Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.368663 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.250718039 podStartE2EDuration="6.368649962s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="2026-02-02 09:13:50.602367176 +0000 UTC m=+1043.146705609" lastFinishedPulling="2026-02-02 09:13:53.720299099 +0000 UTC m=+1046.264637532" observedRunningTime="2026-02-02 09:13:55.367099822 +0000 UTC m=+1047.911438265" watchObservedRunningTime="2026-02-02 09:13:55.368649962 +0000 UTC m=+1047.912988385" Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.400892 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.9107859449999998 podStartE2EDuration="6.400878942s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="2026-02-02 09:13:50.242036307 +0000 UTC m=+1042.786374730" lastFinishedPulling="2026-02-02 09:13:53.732129284 +0000 UTC m=+1046.276467727" observedRunningTime="2026-02-02 09:13:55.399816094 +0000 UTC m=+1047.944154527" watchObservedRunningTime="2026-02-02 09:13:55.400878942 +0000 UTC m=+1047.945217375" Feb 02 09:13:55 crc 
kubenswrapper[4747]: I0202 09:13:55.406639 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerStarted","Data":"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac"} Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.406855 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerStarted","Data":"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec"} Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.406961 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-metadata" containerID="cri-o://1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" gracePeriod=30 Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.406797 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-log" containerID="cri-o://15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" gracePeriod=30 Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.440862 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.207961328 podStartE2EDuration="6.440839581s" podCreationTimestamp="2026-02-02 09:13:49 +0000 UTC" firstStartedPulling="2026-02-02 09:13:50.487345684 +0000 UTC m=+1043.031684117" lastFinishedPulling="2026-02-02 09:13:53.720223937 +0000 UTC m=+1046.264562370" observedRunningTime="2026-02-02 09:13:55.430854924 +0000 UTC m=+1047.975193357" watchObservedRunningTime="2026-02-02 09:13:55.440839581 +0000 UTC m=+1047.985178014" Feb 02 09:13:55 crc kubenswrapper[4747]: W0202 09:13:55.941724 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23da7215_a6e3_49e7_9c46_c20e3963f70a.slice/crio-f734e9860ba85ae497c175e62bbdd6c2c410b6c9adb4fbfed4627c2161059c42 WatchSource:0}: Error finding container f734e9860ba85ae497c175e62bbdd6c2c410b6c9adb4fbfed4627c2161059c42: Status 404 returned error can't find the container with id f734e9860ba85ae497c175e62bbdd6c2c410b6c9adb4fbfed4627c2161059c42 Feb 02 09:13:55 crc kubenswrapper[4747]: I0202 09:13:55.968733 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.111069 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.243487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle\") pod \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.243613 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z8cx\" (UniqueName: \"kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx\") pod \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.243769 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data\") pod \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.244486 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs\") pod \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\" (UID: \"d627c3b6-9a6d-480a-9d7c-8c72151076bf\") " Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.244960 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs" (OuterVolumeSpecName: "logs") pod "d627c3b6-9a6d-480a-9d7c-8c72151076bf" (UID: "d627c3b6-9a6d-480a-9d7c-8c72151076bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.245299 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d627c3b6-9a6d-480a-9d7c-8c72151076bf-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.253157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx" (OuterVolumeSpecName: "kube-api-access-7z8cx") pod "d627c3b6-9a6d-480a-9d7c-8c72151076bf" (UID: "d627c3b6-9a6d-480a-9d7c-8c72151076bf"). InnerVolumeSpecName "kube-api-access-7z8cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.291881 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data" (OuterVolumeSpecName: "config-data") pod "d627c3b6-9a6d-480a-9d7c-8c72151076bf" (UID: "d627c3b6-9a6d-480a-9d7c-8c72151076bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.321399 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d627c3b6-9a6d-480a-9d7c-8c72151076bf" (UID: "d627c3b6-9a6d-480a-9d7c-8c72151076bf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.347262 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.347290 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d627c3b6-9a6d-480a-9d7c-8c72151076bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.347303 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z8cx\" (UniqueName: \"kubernetes.io/projected/d627c3b6-9a6d-480a-9d7c-8c72151076bf-kube-api-access-7z8cx\") on node \"crc\" DevicePath \"\"" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.352671 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e5ce8ec-4180-402e-b931-a24bb7f46fc7" path="/var/lib/kubelet/pods/9e5ce8ec-4180-402e-b931-a24bb7f46fc7/volumes" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416505 4747 generic.go:334] "Generic (PLEG): container finished" podID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerID="1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" exitCode=0 Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416536 4747 generic.go:334] "Generic (PLEG): container finished" podID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerID="15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" exitCode=143 Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416562 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416581 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerDied","Data":"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac"} Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerDied","Data":"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec"} Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d627c3b6-9a6d-480a-9d7c-8c72151076bf","Type":"ContainerDied","Data":"bfd8fc92d0ef9095922919e0c9f373c46ab2dc485db19d2d029d85b8bbcbd029"} Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.416784 4747 scope.go:117] "RemoveContainer" containerID="1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.418064 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerStarted","Data":"f734e9860ba85ae497c175e62bbdd6c2c410b6c9adb4fbfed4627c2161059c42"} Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.442655 4747 scope.go:117] "RemoveContainer" containerID="15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.447077 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:56 crc kubenswrapper[4747]: 
I0202 09:13:56.461975 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.503801 4747 scope.go:117] "RemoveContainer" containerID="1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.503968 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:56 crc kubenswrapper[4747]: E0202 09:13:56.504921 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-metadata" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.504957 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-metadata" Feb 02 09:13:56 crc kubenswrapper[4747]: E0202 09:13:56.504989 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-log" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.505000 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-log" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.505464 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-metadata" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.505524 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" containerName="nova-metadata-log" Feb 02 09:13:56 crc kubenswrapper[4747]: E0202 09:13:56.506099 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac\": container with ID starting with 1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac not found: ID does not exist" containerID="1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.506178 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac"} err="failed to get container status \"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac\": rpc error: code = NotFound desc = could not find container \"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac\": container with ID starting with 1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac not found: ID does not exist" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.506207 4747 scope.go:117] "RemoveContainer" containerID="15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.507551 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: E0202 09:13:56.508043 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec\": container with ID starting with 15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec not found: ID does not exist" containerID="15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.508070 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec"} err="failed to get container status \"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec\": rpc error: code = NotFound desc = could not find container \"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec\": container with ID starting with 15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec not found: ID does not exist" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.508086 4747 scope.go:117] "RemoveContainer" containerID="1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.508637 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac"} err="failed to get container status \"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac\": rpc error: code = NotFound desc = could not find container \"1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac\": container with ID starting with 1e180a85484191f21ea7aa9a39ff6369a68b6abbe15df0dbffa7dbb3cae3f9ac not found: ID does not exist" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.508663 4747 scope.go:117] "RemoveContainer" containerID="15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.511539 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.514464 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec"} err="failed to get container status \"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec\": rpc error: code = NotFound desc = could not find container \"15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec\": container with ID starting with 15d2f6e18d1367b4cefa3ac5662f50767813015caca1846ebaa635e34b4872ec not found: ID does not exist" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.518385 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.549106 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.662234 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" 
Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.662282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.662463 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.662582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.662991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxvdq\" (UniqueName: \"kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.764707 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.765001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxvdq\" (UniqueName: \"kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.765108 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.765217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.765298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.765772 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.770055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.770753 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.773470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.790530 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxvdq\" (UniqueName: \"kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq\") pod \"nova-metadata-0\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " pod="openstack/nova-metadata-0" Feb 02 09:13:56 crc kubenswrapper[4747]: I0202 09:13:56.855511 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:13:57 crc kubenswrapper[4747]: I0202 09:13:57.429404 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerStarted","Data":"8b38a93253684a765f5fb4dabb6108a401bcb36e4970c8e244a2190219ed0a33"} Feb 02 09:13:58 crc kubenswrapper[4747]: I0202 09:13:58.097859 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:13:58 crc kubenswrapper[4747]: I0202 09:13:58.358790 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d627c3b6-9a6d-480a-9d7c-8c72151076bf" path="/var/lib/kubelet/pods/d627c3b6-9a6d-480a-9d7c-8c72151076bf/volumes" Feb 02 09:13:58 crc kubenswrapper[4747]: I0202 09:13:58.453555 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerStarted","Data":"db89047dae02a55848379f48c04278e431de56457150cd0068a6f37a09f8e660"} Feb 02 09:13:58 crc kubenswrapper[4747]: I0202 09:13:58.456113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerStarted","Data":"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e"} Feb 02 09:13:58 crc kubenswrapper[4747]: I0202 09:13:58.456158 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerStarted","Data":"4cc815f58d53cbe94842ab60ea6d43fcfc647913d587d9c971a808a0a634bb08"} Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.466867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerStarted","Data":"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d"} Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.469384 4747 generic.go:334] "Generic (PLEG): container finished" podID="cc167b60-b071-4a53-9cac-27a8dd516321" containerID="40aba8b534e1c3a42cb1565e2ed91ffcebe9f7b07ef15fa63c9a312e131e2d94" exitCode=0 Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.469471 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qqtqd" event={"ID":"cc167b60-b071-4a53-9cac-27a8dd516321","Type":"ContainerDied","Data":"40aba8b534e1c3a42cb1565e2ed91ffcebe9f7b07ef15fa63c9a312e131e2d94"} Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.472565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerStarted","Data":"15fb323e7e0868754ec715d8afbd9c2a7c9996ab93abd2b8f1402fbf6d10d250"} Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.496906 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.496860699 podStartE2EDuration="3.496860699s" podCreationTimestamp="2026-02-02 09:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:13:59.488603877 +0000 UTC m=+1052.032942340" watchObservedRunningTime="2026-02-02 09:13:59.496860699 +0000 UTC m=+1052.041199142" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.655121 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.655264 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.930078 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.930118 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.940147 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:13:59 crc kubenswrapper[4747]: I0202 09:13:59.993635 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.038533 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.038839 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="dnsmasq-dns" containerID="cri-o://d8df1ae2a13c094d440863515caa194ba0e3d04a95db19c5adb8dd64e26b683b" gracePeriod=10 Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.485664 4747 generic.go:334] "Generic (PLEG): container finished" podID="7615847e-cebb-4f8a-a453-7ae866963464" containerID="d8df1ae2a13c094d440863515caa194ba0e3d04a95db19c5adb8dd64e26b683b" exitCode=0 Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.486865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" 
event={"ID":"7615847e-cebb-4f8a-a453-7ae866963464","Type":"ContainerDied","Data":"d8df1ae2a13c094d440863515caa194ba0e3d04a95db19c5adb8dd64e26b683b"} Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.606155 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.740079 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.740580 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.788178 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.862835 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.864807 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.864885 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.864903 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.864953 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.864983 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjvm2\" (UniqueName: \"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2\") pod \"7615847e-cebb-4f8a-a453-7ae866963464\" (UID: \"7615847e-cebb-4f8a-a453-7ae866963464\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.875849 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2" (OuterVolumeSpecName: "kube-api-access-bjvm2") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "kube-api-access-bjvm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.884271 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.946730 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config" (OuterVolumeSpecName: "config") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.964909 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.965475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.968920 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle\") pod \"cc167b60-b071-4a53-9cac-27a8dd516321\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.969159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts\") pod \"cc167b60-b071-4a53-9cac-27a8dd516321\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.969228 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lthl\" (UniqueName: \"kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl\") pod \"cc167b60-b071-4a53-9cac-27a8dd516321\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.969310 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data\") pod \"cc167b60-b071-4a53-9cac-27a8dd516321\" (UID: \"cc167b60-b071-4a53-9cac-27a8dd516321\") " Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.970360 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.970391 4747 reconciler_common.go:293] 
"Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.970407 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjvm2\" (UniqueName: \"kubernetes.io/projected/7615847e-cebb-4f8a-a453-7ae866963464-kube-api-access-bjvm2\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.970461 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.970521 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.973288 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts" (OuterVolumeSpecName: "scripts") pod "cc167b60-b071-4a53-9cac-27a8dd516321" (UID: "cc167b60-b071-4a53-9cac-27a8dd516321"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.982132 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl" (OuterVolumeSpecName: "kube-api-access-6lthl") pod "cc167b60-b071-4a53-9cac-27a8dd516321" (UID: "cc167b60-b071-4a53-9cac-27a8dd516321"). InnerVolumeSpecName "kube-api-access-6lthl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:00 crc kubenswrapper[4747]: I0202 09:14:00.999337 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data" (OuterVolumeSpecName: "config-data") pod "cc167b60-b071-4a53-9cac-27a8dd516321" (UID: "cc167b60-b071-4a53-9cac-27a8dd516321"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.008434 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7615847e-cebb-4f8a-a453-7ae866963464" (UID: "7615847e-cebb-4f8a-a453-7ae866963464"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.016281 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc167b60-b071-4a53-9cac-27a8dd516321" (UID: "cc167b60-b071-4a53-9cac-27a8dd516321"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071861 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071899 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7615847e-cebb-4f8a-a453-7ae866963464-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071911 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071921 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071930 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lthl\" (UniqueName: \"kubernetes.io/projected/cc167b60-b071-4a53-9cac-27a8dd516321-kube-api-access-6lthl\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.071953 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc167b60-b071-4a53-9cac-27a8dd516321-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.496482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qqtqd" event={"ID":"cc167b60-b071-4a53-9cac-27a8dd516321","Type":"ContainerDied","Data":"5b2f0a652c6da96e7514f14f22f23be68ce2ca68ad456c82ef992cc60e48cd9d"} Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.496524 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b2f0a652c6da96e7514f14f22f23be68ce2ca68ad456c82ef992cc60e48cd9d" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.496585 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qqtqd" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.502635 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerStarted","Data":"6cdb4b94ce2e2b7f0f7d30352ff2ddf611d3f4c68e40d95363f7ceb110274cc8"} Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.502769 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.504385 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" event={"ID":"7615847e-cebb-4f8a-a453-7ae866963464","Type":"ContainerDied","Data":"e88c40de8be7f9cc4247b640786b8a71078ad958c852db929460d35bd5a1d5c9"} Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.504455 4747 scope.go:117] "RemoveContainer" containerID="d8df1ae2a13c094d440863515caa194ba0e3d04a95db19c5adb8dd64e26b683b" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.504403 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-pq9cn" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.508349 4747 generic.go:334] "Generic (PLEG): container finished" podID="4675701d-dd13-435f-ada3-53023492cb1e" containerID="d684bb3a72710f3d15448c93b8d1b0f0fc98cdda20a2cf9c1c5b4401db9e1592" exitCode=0 Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.508441 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j5llb" event={"ID":"4675701d-dd13-435f-ada3-53023492cb1e","Type":"ContainerDied","Data":"d684bb3a72710f3d15448c93b8d1b0f0fc98cdda20a2cf9c1c5b4401db9e1592"} Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.527377 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.894578373 podStartE2EDuration="7.527358772s" podCreationTimestamp="2026-02-02 09:13:54 +0000 UTC" firstStartedPulling="2026-02-02 09:13:55.945233549 +0000 UTC m=+1048.489571982" lastFinishedPulling="2026-02-02 09:14:00.578013948 +0000 UTC m=+1053.122352381" observedRunningTime="2026-02-02 09:14:01.526453289 +0000 UTC m=+1054.070791722" watchObservedRunningTime="2026-02-02 09:14:01.527358772 +0000 UTC m=+1054.071697195" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.559811 4747 scope.go:117] "RemoveContainer" containerID="25d0928c2ccecefa42e3a7bebc3783b98e3ab7ded40aa37b406d59c26bd44309" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.597600 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.612341 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-pq9cn"] Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.708513 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.708740 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-log" containerID="cri-o://ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82" gracePeriod=30 Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.709503 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-api" containerID="cri-o://c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681" gracePeriod=30 Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.721202 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.756201 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.756448 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-log" containerID="cri-o://ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" gracePeriod=30 Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.756795 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-metadata" 
containerID="cri-o://e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" gracePeriod=30 Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.856954 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:01 crc kubenswrapper[4747]: I0202 09:14:01.857005 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.355249 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7615847e-cebb-4f8a-a453-7ae866963464" path="/var/lib/kubelet/pods/7615847e-cebb-4f8a-a453-7ae866963464/volumes" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.433776 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.508144 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs\") pod \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.508209 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data\") pod \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.508335 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs\") pod \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.508409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxvdq\" (UniqueName: \"kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq\") pod \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.508446 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle\") pod \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\" (UID: \"a9c81b2a-72d4-4fab-b70f-021a08f93c96\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.510479 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs" (OuterVolumeSpecName: "logs") pod "a9c81b2a-72d4-4fab-b70f-021a08f93c96" (UID: "a9c81b2a-72d4-4fab-b70f-021a08f93c96"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.528304 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq" (OuterVolumeSpecName: "kube-api-access-xxvdq") pod "a9c81b2a-72d4-4fab-b70f-021a08f93c96" (UID: "a9c81b2a-72d4-4fab-b70f-021a08f93c96"). InnerVolumeSpecName "kube-api-access-xxvdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.537756 4747 generic.go:334] "Generic (PLEG): container finished" podID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerID="ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82" exitCode=143 Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.537824 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerDied","Data":"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82"} Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.550613 4747 generic.go:334] "Generic (PLEG): container finished" podID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerID="e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" exitCode=0 Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.550653 4747 generic.go:334] "Generic (PLEG): container finished" podID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerID="ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" exitCode=143 Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.550856 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.551017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerDied","Data":"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d"} Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.551120 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerDied","Data":"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e"} Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.551178 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a9c81b2a-72d4-4fab-b70f-021a08f93c96","Type":"ContainerDied","Data":"4cc815f58d53cbe94842ab60ea6d43fcfc647913d587d9c971a808a0a634bb08"} Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.551239 4747 scope.go:117] "RemoveContainer" containerID="e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.557241 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerName="nova-scheduler-scheduler" containerID="cri-o://72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" gracePeriod=30 Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.574839 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9c81b2a-72d4-4fab-b70f-021a08f93c96" (UID: "a9c81b2a-72d4-4fab-b70f-021a08f93c96"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.610510 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxvdq\" (UniqueName: \"kubernetes.io/projected/a9c81b2a-72d4-4fab-b70f-021a08f93c96-kube-api-access-xxvdq\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.610549 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.610561 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9c81b2a-72d4-4fab-b70f-021a08f93c96-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.616106 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a9c81b2a-72d4-4fab-b70f-021a08f93c96" (UID: "a9c81b2a-72d4-4fab-b70f-021a08f93c96"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.620469 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data" (OuterVolumeSpecName: "config-data") pod "a9c81b2a-72d4-4fab-b70f-021a08f93c96" (UID: "a9c81b2a-72d4-4fab-b70f-021a08f93c96"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.684514 4747 scope.go:117] "RemoveContainer" containerID="ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.708418 4747 scope.go:117] "RemoveContainer" containerID="e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.709890 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d\": container with ID starting with e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d not found: ID does not exist" containerID="e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.709949 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d"} err="failed to get container status \"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d\": rpc error: code = NotFound desc = could not find container \"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d\": container with ID starting with e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d not found: ID does not exist" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.709979 4747 scope.go:117] "RemoveContainer" containerID="ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.717446 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e\": container with ID starting with ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e not found: ID does not exist" containerID="ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.717497 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e"} err="failed to get container status \"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e\": rpc error: code = NotFound desc = could not find container \"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e\": container with ID starting with ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e not found: ID does not exist" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.717528 4747 scope.go:117] "RemoveContainer" containerID="e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.717814 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d"} err="failed to get container status \"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d\": rpc error: code = NotFound desc = could not find container \"e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d\": container with ID starting with e38de9418986f39164419e3c6c3701ecf63200f5d07242be7cfb94e89806be1d not found: ID does not exist" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.717834 4747 scope.go:117] "RemoveContainer" containerID="ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.718070 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e"} err="failed to get container status \"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e\": rpc error: code = NotFound desc = could not find container \"ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e\": container with ID starting with ac3e499028ce63a50518b757ac9f679c284396024f5f2721b7cd0f8ec98e0a4e not found: ID does not exist" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.724732 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.724771 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9c81b2a-72d4-4fab-b70f-021a08f93c96-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.866991 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.927174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lspd\" (UniqueName: \"kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd\") pod \"4675701d-dd13-435f-ada3-53023492cb1e\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.929804 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data\") pod \"4675701d-dd13-435f-ada3-53023492cb1e\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.929841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle\") pod \"4675701d-dd13-435f-ada3-53023492cb1e\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.929889 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts\") pod \"4675701d-dd13-435f-ada3-53023492cb1e\" (UID: \"4675701d-dd13-435f-ada3-53023492cb1e\") " Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.927436 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.935006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd" (OuterVolumeSpecName: "kube-api-access-8lspd") pod "4675701d-dd13-435f-ada3-53023492cb1e" (UID: "4675701d-dd13-435f-ada3-53023492cb1e"). InnerVolumeSpecName "kube-api-access-8lspd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.935702 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts" (OuterVolumeSpecName: "scripts") pod "4675701d-dd13-435f-ada3-53023492cb1e" (UID: "4675701d-dd13-435f-ada3-53023492cb1e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.943562 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.957065 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958426 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc167b60-b071-4a53-9cac-27a8dd516321" containerName="nova-manage" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958478 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc167b60-b071-4a53-9cac-27a8dd516321" containerName="nova-manage" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958505 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4675701d-dd13-435f-ada3-53023492cb1e" containerName="nova-cell1-conductor-db-sync" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958514 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4675701d-dd13-435f-ada3-53023492cb1e" containerName="nova-cell1-conductor-db-sync" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958567 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-log" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958579 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-log" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958591 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="init" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958598 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="init" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958607 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="dnsmasq-dns" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958642 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="dnsmasq-dns" Feb 02 09:14:02 crc kubenswrapper[4747]: E0202 09:14:02.958664 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-metadata" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958671 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-metadata" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.958987 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc167b60-b071-4a53-9cac-27a8dd516321" containerName="nova-manage" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.959013 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-log" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.959060 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" containerName="nova-metadata-metadata" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.959074 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4675701d-dd13-435f-ada3-53023492cb1e" 
containerName="nova-cell1-conductor-db-sync" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.959091 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7615847e-cebb-4f8a-a453-7ae866963464" containerName="dnsmasq-dns" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.960496 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.962849 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.963351 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.966981 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.987133 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data" (OuterVolumeSpecName: "config-data") pod "4675701d-dd13-435f-ada3-53023492cb1e" (UID: "4675701d-dd13-435f-ada3-53023492cb1e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:02 crc kubenswrapper[4747]: I0202 09:14:02.987482 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4675701d-dd13-435f-ada3-53023492cb1e" (UID: "4675701d-dd13-435f-ada3-53023492cb1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.032425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q8d6\" (UniqueName: \"kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.032519 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.032678 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.032854 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.033043 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.033208 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.033232 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.033247 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4675701d-dd13-435f-ada3-53023492cb1e-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.033259 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lspd\" (UniqueName: \"kubernetes.io/projected/4675701d-dd13-435f-ada3-53023492cb1e-kube-api-access-8lspd\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134265 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134340 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134393 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q8d6\" (UniqueName: \"kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134446 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134479 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.134807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.137905 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.138734 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.138959 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.155557 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q8d6\" (UniqueName: \"kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6\") pod \"nova-metadata-0\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.276570 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.566452 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-j5llb" event={"ID":"4675701d-dd13-435f-ada3-53023492cb1e","Type":"ContainerDied","Data":"ae3fbf14e9091857aec88acd1201582e34191f824ac3931ef9f6aeca84675ce5"} Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.566495 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae3fbf14e9091857aec88acd1201582e34191f824ac3931ef9f6aeca84675ce5" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.566546 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-j5llb" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.622085 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.623501 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.632348 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.646165 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.736361 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:03 crc kubenswrapper[4747]: W0202 09:14:03.736641 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod902a6326_e93d_4f54_b760_62f58b36f0f7.slice/crio-bc795209b9140c4604c2da9754565a5144d7b0c842d0dd57422b1512a01ed250 WatchSource:0}: Error finding container bc795209b9140c4604c2da9754565a5144d7b0c842d0dd57422b1512a01ed250: Status 404 returned error can't find the container with id bc795209b9140c4604c2da9754565a5144d7b0c842d0dd57422b1512a01ed250 Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.744740 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmp2m\" (UniqueName: \"kubernetes.io/projected/ec1fef68-e209-465e-a237-cce8f03abf20-kube-api-access-zmp2m\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.744790 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.744973 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.847276 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.848145 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmp2m\" (UniqueName: \"kubernetes.io/projected/ec1fef68-e209-465e-a237-cce8f03abf20-kube-api-access-zmp2m\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.848322 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.853799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.860095 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1fef68-e209-465e-a237-cce8f03abf20-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.870603 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmp2m\" (UniqueName: \"kubernetes.io/projected/ec1fef68-e209-465e-a237-cce8f03abf20-kube-api-access-zmp2m\") pod \"nova-cell1-conductor-0\" (UID: \"ec1fef68-e209-465e-a237-cce8f03abf20\") " pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:03 crc kubenswrapper[4747]: I0202 09:14:03.942840 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.393110 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9c81b2a-72d4-4fab-b70f-021a08f93c96" path="/var/lib/kubelet/pods/a9c81b2a-72d4-4fab-b70f-021a08f93c96/volumes" Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.424463 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 09:14:04 crc kubenswrapper[4747]: W0202 09:14:04.427851 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec1fef68_e209_465e_a237_cce8f03abf20.slice/crio-145ec8c520e5b7046c6c7d2f2d39686460c016ad91bc8ffd1482871e66f885ad WatchSource:0}: Error finding container 145ec8c520e5b7046c6c7d2f2d39686460c016ad91bc8ffd1482871e66f885ad: Status 404 returned error can't find the container with id 145ec8c520e5b7046c6c7d2f2d39686460c016ad91bc8ffd1482871e66f885ad Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.582372 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ec1fef68-e209-465e-a237-cce8f03abf20","Type":"ContainerStarted","Data":"145ec8c520e5b7046c6c7d2f2d39686460c016ad91bc8ffd1482871e66f885ad"} Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.583616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerStarted","Data":"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723"} Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.583649 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerStarted","Data":"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925"} Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.583664 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerStarted","Data":"bc795209b9140c4604c2da9754565a5144d7b0c842d0dd57422b1512a01ed250"} Feb 02 09:14:04 crc kubenswrapper[4747]: I0202 09:14:04.623377 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.623360272 
podStartE2EDuration="2.623360272s" podCreationTimestamp="2026-02-02 09:14:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:04.61628562 +0000 UTC m=+1057.160624073" watchObservedRunningTime="2026-02-02 09:14:04.623360272 +0000 UTC m=+1057.167698705" Feb 02 09:14:04 crc kubenswrapper[4747]: E0202 09:14:04.932974 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 09:14:04 crc kubenswrapper[4747]: E0202 09:14:04.934725 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 09:14:04 crc kubenswrapper[4747]: E0202 09:14:04.936772 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 09:14:04 crc kubenswrapper[4747]: E0202 09:14:04.936828 4747 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerName="nova-scheduler-scheduler" Feb 02 09:14:05 crc kubenswrapper[4747]: I0202 09:14:05.594622 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ec1fef68-e209-465e-a237-cce8f03abf20","Type":"ContainerStarted","Data":"27f93f2b1b0bf03eb80bbd8fc363e747ab0027feaea8ceab8580793fa4d00804"} Feb 02 09:14:05 crc kubenswrapper[4747]: I0202 09:14:05.594954 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:05 crc kubenswrapper[4747]: I0202 09:14:05.616374 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.61635274 podStartE2EDuration="2.61635274s" podCreationTimestamp="2026-02-02 09:14:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:05.612743497 +0000 UTC m=+1058.157081970" watchObservedRunningTime="2026-02-02 09:14:05.61635274 +0000 UTC m=+1058.160691173" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.104008 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.224282 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle\") pod \"15a70c7a-5b74-4348-93e8-2b204ca16d11\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.224380 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data\") pod \"15a70c7a-5b74-4348-93e8-2b204ca16d11\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.224509 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpl8q\" (UniqueName: \"kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q\") pod \"15a70c7a-5b74-4348-93e8-2b204ca16d11\" (UID: \"15a70c7a-5b74-4348-93e8-2b204ca16d11\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.230792 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q" (OuterVolumeSpecName: "kube-api-access-mpl8q") pod "15a70c7a-5b74-4348-93e8-2b204ca16d11" (UID: "15a70c7a-5b74-4348-93e8-2b204ca16d11"). InnerVolumeSpecName "kube-api-access-mpl8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.259376 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data" (OuterVolumeSpecName: "config-data") pod "15a70c7a-5b74-4348-93e8-2b204ca16d11" (UID: "15a70c7a-5b74-4348-93e8-2b204ca16d11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.264288 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15a70c7a-5b74-4348-93e8-2b204ca16d11" (UID: "15a70c7a-5b74-4348-93e8-2b204ca16d11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.326959 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.327000 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a70c7a-5b74-4348-93e8-2b204ca16d11-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.327012 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpl8q\" (UniqueName: \"kubernetes.io/projected/15a70c7a-5b74-4348-93e8-2b204ca16d11-kube-api-access-mpl8q\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.568188 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.628562 4747 generic.go:334] "Generic (PLEG): container finished" podID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" exitCode=0 Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.628617 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15a70c7a-5b74-4348-93e8-2b204ca16d11","Type":"ContainerDied","Data":"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117"} Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.628643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15a70c7a-5b74-4348-93e8-2b204ca16d11","Type":"ContainerDied","Data":"0fd68ee5813705ebc7c2ce65c77d1f75092219235706a13dab1188563667eafe"} Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.628658 4747 scope.go:117] "RemoveContainer" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.628783 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.639187 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs\") pod \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.639367 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle\") pod \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.639432 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g9nr\" (UniqueName: \"kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr\") pod \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.639505 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data\") pod \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\" (UID: \"3ff49e6e-949d-4f1e-85d6-108fa5edfece\") " Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.639676 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs" (OuterVolumeSpecName: "logs") pod "3ff49e6e-949d-4f1e-85d6-108fa5edfece" (UID: "3ff49e6e-949d-4f1e-85d6-108fa5edfece"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.640224 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ff49e6e-949d-4f1e-85d6-108fa5edfece-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.646300 4747 generic.go:334] "Generic (PLEG): container finished" podID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerID="c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681" exitCode=0 Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.646333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerDied","Data":"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681"} Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.646357 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3ff49e6e-949d-4f1e-85d6-108fa5edfece","Type":"ContainerDied","Data":"c0b367c345f0ec2fa8712826952568c18951be6b85b6724d16442fa4edafe57e"} Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.646410 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.680808 4747 scope.go:117] "RemoveContainer" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.681324 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117\": container with ID starting with 72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117 not found: ID does not exist" containerID="72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.681351 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117"} err="failed to get container status \"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117\": rpc error: code = NotFound desc = could not find container \"72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117\": container with ID starting with 72a7df24e5b694ec14d685a466ffc94ca443114a3da40f968266808dd539a117 not found: ID does not exist" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.681370 4747 scope.go:117] "RemoveContainer" containerID="c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.683388 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr" (OuterVolumeSpecName: "kube-api-access-2g9nr") pod "3ff49e6e-949d-4f1e-85d6-108fa5edfece" (UID: "3ff49e6e-949d-4f1e-85d6-108fa5edfece"). InnerVolumeSpecName "kube-api-access-2g9nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.696072 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ff49e6e-949d-4f1e-85d6-108fa5edfece" (UID: "3ff49e6e-949d-4f1e-85d6-108fa5edfece"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.713273 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data" (OuterVolumeSpecName: "config-data") pod "3ff49e6e-949d-4f1e-85d6-108fa5edfece" (UID: "3ff49e6e-949d-4f1e-85d6-108fa5edfece"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.719889 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.726981 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.736541 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.736946 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-log" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.736963 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-log" Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.736982 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerName="nova-scheduler-scheduler" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.736988 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerName="nova-scheduler-scheduler" Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.737003 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-api" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.737008 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-api" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.737174 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-api" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.737188 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" containerName="nova-scheduler-scheduler" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.737202 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" containerName="nova-api-log" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.737888 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.739751 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.741655 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.741682 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g9nr\" (UniqueName: \"kubernetes.io/projected/3ff49e6e-949d-4f1e-85d6-108fa5edfece-kube-api-access-2g9nr\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.741693 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ff49e6e-949d-4f1e-85d6-108fa5edfece-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.752137 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.776800 4747 scope.go:117] "RemoveContainer" containerID="ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.809246 4747 scope.go:117] "RemoveContainer" containerID="c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681" Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.809720 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681\": container with ID starting with c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681 not found: ID does not exist" containerID="c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.809756 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681"} err="failed to get container status \"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681\": rpc error: code = NotFound desc = could not find container \"c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681\": container with ID starting with c3c7f4f4b278f205c44073168e14765123f783115f5e491d6aabead97c2ae681 not found: ID does not exist" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.809800 4747 scope.go:117] "RemoveContainer" containerID="ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82" Feb 02 09:14:07 crc kubenswrapper[4747]: E0202 09:14:07.810483 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82\": container with ID starting with ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82 not found: ID does not exist" containerID="ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.810512 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82"} err="failed to get container status 
\"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82\": rpc error: code = NotFound desc = could not find container \"ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82\": container with ID starting with ecf86e0c9ccf84316df45402029912a390e7891e67575c2068d311184a568b82 not found: ID does not exist" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.843511 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-845wr\" (UniqueName: \"kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.843627 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.843658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.947404 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-845wr\" (UniqueName: \"kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.947613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.947657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.960567 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.963695 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:07 crc kubenswrapper[4747]: I0202 09:14:07.970110 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-845wr\" (UniqueName: 
\"kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr\") pod \"nova-scheduler-0\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.058190 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.065903 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.073785 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.087854 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.089852 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.094173 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.122461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.155882 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glvx2\" (UniqueName: \"kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.155952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.155987 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.156090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.257744 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.257858 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glvx2\" (UniqueName: \"kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.257908 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.257964 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.260728 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.264633 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.265102 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.276867 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.277285 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.279161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glvx2\" (UniqueName: \"kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2\") pod \"nova-api-0\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.354376 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a70c7a-5b74-4348-93e8-2b204ca16d11" path="/var/lib/kubelet/pods/15a70c7a-5b74-4348-93e8-2b204ca16d11/volumes" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.355103 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ff49e6e-949d-4f1e-85d6-108fa5edfece" path="/var/lib/kubelet/pods/3ff49e6e-949d-4f1e-85d6-108fa5edfece/volumes" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.450538 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.580314 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:08 crc kubenswrapper[4747]: W0202 09:14:08.586212 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddae66685_4314_4edd_9446_dcd2fe84f777.slice/crio-430b01b30ea94c1bab0ffcb6b32499451d35f4bcde1ac17c7976437b17e57695 WatchSource:0}: Error finding container 430b01b30ea94c1bab0ffcb6b32499451d35f4bcde1ac17c7976437b17e57695: Status 404 returned error can't find the container with id 430b01b30ea94c1bab0ffcb6b32499451d35f4bcde1ac17c7976437b17e57695 Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.656379 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dae66685-4314-4edd-9446-dcd2fe84f777","Type":"ContainerStarted","Data":"430b01b30ea94c1bab0ffcb6b32499451d35f4bcde1ac17c7976437b17e57695"} Feb 02 09:14:08 crc kubenswrapper[4747]: W0202 09:14:08.924284 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc2d2533_b950_4276_917c_d33693cf6785.slice/crio-04ee13d985c1d0fae836500a292e319546fb9e213cbef3f4290338c2ddc3d8bb WatchSource:0}: Error finding container 04ee13d985c1d0fae836500a292e319546fb9e213cbef3f4290338c2ddc3d8bb: Status 404 returned error can't find the container with id 04ee13d985c1d0fae836500a292e319546fb9e213cbef3f4290338c2ddc3d8bb Feb 02 09:14:08 crc kubenswrapper[4747]: I0202 09:14:08.938305 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.669681 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dae66685-4314-4edd-9446-dcd2fe84f777","Type":"ContainerStarted","Data":"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647"} Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.671348 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerStarted","Data":"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8"} Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.671391 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerStarted","Data":"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5"} Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.671407 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerStarted","Data":"04ee13d985c1d0fae836500a292e319546fb9e213cbef3f4290338c2ddc3d8bb"} Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.699590 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.699573389 podStartE2EDuration="2.699573389s" podCreationTimestamp="2026-02-02 09:14:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:09.689398167 +0000 UTC m=+1062.233736600" watchObservedRunningTime="2026-02-02 09:14:09.699573389 +0000 UTC m=+1062.243911812" Feb 02 09:14:09 crc kubenswrapper[4747]: I0202 09:14:09.723257 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.7232299279999999 podStartE2EDuration="1.723229928s" podCreationTimestamp="2026-02-02 09:14:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:09.710035118 +0000 UTC m=+1062.254373551" watchObservedRunningTime="2026-02-02 09:14:09.723229928 +0000 UTC m=+1062.267568381" Feb 02 09:14:13 crc kubenswrapper[4747]: I0202 09:14:13.073845 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 09:14:13 crc kubenswrapper[4747]: I0202 09:14:13.277662 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 09:14:13 crc kubenswrapper[4747]: I0202 09:14:13.278008 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 09:14:13 crc kubenswrapper[4747]: I0202 09:14:13.979133 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 02 09:14:14 crc kubenswrapper[4747]: I0202 09:14:14.292068 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:14 crc kubenswrapper[4747]: I0202 09:14:14.292160 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:18 crc kubenswrapper[4747]: I0202 09:14:18.074259 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 09:14:18 crc kubenswrapper[4747]: I0202 09:14:18.107888 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 09:14:18 crc kubenswrapper[4747]: I0202 09:14:18.451291 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:14:18 crc kubenswrapper[4747]: I0202 09:14:18.451398 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:14:18 crc kubenswrapper[4747]: I0202 09:14:18.783189 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 09:14:19 crc kubenswrapper[4747]: I0202 09:14:19.533160 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:19 crc kubenswrapper[4747]: I0202 09:14:19.533193 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:23 crc kubenswrapper[4747]: I0202 09:14:23.289022 
4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 09:14:23 crc kubenswrapper[4747]: I0202 09:14:23.291420 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 09:14:23 crc kubenswrapper[4747]: I0202 09:14:23.298787 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 09:14:23 crc kubenswrapper[4747]: I0202 09:14:23.801424 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.799226 4747 generic.go:334] "Generic (PLEG): container finished" podID="d2182862-d41c-4cc6-a47f-d67738c10793" containerID="680fc39bdc18300762da33ddc18b525ece6d207ed4ea281091cc9ae502680fbe" exitCode=137 Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.800412 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d2182862-d41c-4cc6-a47f-d67738c10793","Type":"ContainerDied","Data":"680fc39bdc18300762da33ddc18b525ece6d207ed4ea281091cc9ae502680fbe"} Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.800440 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d2182862-d41c-4cc6-a47f-d67738c10793","Type":"ContainerDied","Data":"a2d00ef009db0de942cd4776aacce9f51c8c1c7622ee8dbb95ebd86397e3684b"} Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.800454 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2d00ef009db0de942cd4776aacce9f51c8c1c7622ee8dbb95ebd86397e3684b" Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.864417 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.970114 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data\") pod \"d2182862-d41c-4cc6-a47f-d67738c10793\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.970222 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle\") pod \"d2182862-d41c-4cc6-a47f-d67738c10793\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.970550 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwls5\" (UniqueName: \"kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5\") pod \"d2182862-d41c-4cc6-a47f-d67738c10793\" (UID: \"d2182862-d41c-4cc6-a47f-d67738c10793\") " Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.975668 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5" (OuterVolumeSpecName: "kube-api-access-wwls5") pod "d2182862-d41c-4cc6-a47f-d67738c10793" (UID: "d2182862-d41c-4cc6-a47f-d67738c10793"). InnerVolumeSpecName "kube-api-access-wwls5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.996995 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data" (OuterVolumeSpecName: "config-data") pod "d2182862-d41c-4cc6-a47f-d67738c10793" (UID: "d2182862-d41c-4cc6-a47f-d67738c10793"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:24 crc kubenswrapper[4747]: I0202 09:14:24.997304 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2182862-d41c-4cc6-a47f-d67738c10793" (UID: "d2182862-d41c-4cc6-a47f-d67738c10793"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.072796 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.072829 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2182862-d41c-4cc6-a47f-d67738c10793-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.072840 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwls5\" (UniqueName: \"kubernetes.io/projected/d2182862-d41c-4cc6-a47f-d67738c10793-kube-api-access-wwls5\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.102858 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.810826 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.848976 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.858587 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.884476 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:14:25 crc kubenswrapper[4747]: E0202 09:14:25.885024 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2182862-d41c-4cc6-a47f-d67738c10793" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.885051 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2182862-d41c-4cc6-a47f-d67738c10793" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.885264 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2182862-d41c-4cc6-a47f-d67738c10793" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.886056 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.892354 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.894651 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.895102 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.895226 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.993989 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.994134 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.994164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.994226 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:25 crc kubenswrapper[4747]: I0202 09:14:25.994278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2frkl\" (UniqueName: \"kubernetes.io/projected/329eee3d-3827-486f-bd32-f72c288610a2-kube-api-access-2frkl\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.097064 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.097153 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2frkl\" (UniqueName: \"kubernetes.io/projected/329eee3d-3827-486f-bd32-f72c288610a2-kube-api-access-2frkl\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 
02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.097434 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.097494 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.097565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.102783 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.102830 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.103969 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.104606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/329eee3d-3827-486f-bd32-f72c288610a2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.140398 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2frkl\" (UniqueName: \"kubernetes.io/projected/329eee3d-3827-486f-bd32-f72c288610a2-kube-api-access-2frkl\") pod \"nova-cell1-novncproxy-0\" (UID: \"329eee3d-3827-486f-bd32-f72c288610a2\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.237508 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.356310 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2182862-d41c-4cc6-a47f-d67738c10793" path="/var/lib/kubelet/pods/d2182862-d41c-4cc6-a47f-d67738c10793/volumes" Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.690060 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 09:14:26 crc kubenswrapper[4747]: I0202 09:14:26.835155 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"329eee3d-3827-486f-bd32-f72c288610a2","Type":"ContainerStarted","Data":"f5e8748eff68a615f7ece4457e86716468415dbf16a9045654044d585d9e3aee"} Feb 02 09:14:27 crc kubenswrapper[4747]: I0202 09:14:27.844986 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"329eee3d-3827-486f-bd32-f72c288610a2","Type":"ContainerStarted","Data":"aa8be3d00fbc85e22498fa02eb4719c55f759b78ceaaa92c7e4797af4f963b40"} Feb 02 09:14:27 crc kubenswrapper[4747]: I0202 09:14:27.872634 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.872615047 podStartE2EDuration="2.872615047s" podCreationTimestamp="2026-02-02 09:14:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:27.862993789 +0000 UTC m=+1080.407332232" watchObservedRunningTime="2026-02-02 09:14:27.872615047 +0000 UTC m=+1080.416953480" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.515321 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.516901 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.521162 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.527618 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.843275 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.843502 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" containerName="kube-state-metrics" containerID="cri-o://ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4" gracePeriod=30 Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.854212 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 09:14:28 crc kubenswrapper[4747]: I0202 09:14:28.863341 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.060595 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.066879 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.095070 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.161637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.161810 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7grpj\" (UniqueName: \"kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.161843 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.161868 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.162010 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.162041 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.299501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.299761 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7grpj\" (UniqueName: \"kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.299810 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.299838 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.304229 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.304535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.315327 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.328497 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.328551 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.329224 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.329580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.348927 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7grpj\" (UniqueName: 
\"kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj\") pod \"dnsmasq-dns-59cf4bdb65-w57fn\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.400219 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.482880 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.634958 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrmkm\" (UniqueName: \"kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm\") pod \"b43bea3c-d709-4f84-a052-a9b2500eaa8a\" (UID: \"b43bea3c-d709-4f84-a052-a9b2500eaa8a\") " Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.641329 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm" (OuterVolumeSpecName: "kube-api-access-rrmkm") pod "b43bea3c-d709-4f84-a052-a9b2500eaa8a" (UID: "b43bea3c-d709-4f84-a052-a9b2500eaa8a"). InnerVolumeSpecName "kube-api-access-rrmkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.738245 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrmkm\" (UniqueName: \"kubernetes.io/projected/b43bea3c-d709-4f84-a052-a9b2500eaa8a-kube-api-access-rrmkm\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.862373 4747 generic.go:334] "Generic (PLEG): container finished" podID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" containerID="ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4" exitCode=2 Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.862439 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.862459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b43bea3c-d709-4f84-a052-a9b2500eaa8a","Type":"ContainerDied","Data":"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4"} Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.862793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b43bea3c-d709-4f84-a052-a9b2500eaa8a","Type":"ContainerDied","Data":"0f12f5fd884f3646f42cf1242a636d661a0331bf1c9715fd7d4a4ba28036c52f"} Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.862834 4747 scope.go:117] "RemoveContainer" containerID="ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.894861 4747 scope.go:117] "RemoveContainer" containerID="ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4" Feb 02 09:14:29 crc kubenswrapper[4747]: E0202 09:14:29.895362 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4\": container with ID starting with ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4 not found: ID does not exist" containerID="ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.895411 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4"} err="failed to get container status \"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4\": rpc error: code = NotFound desc = could not find container \"ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4\": container with ID starting with ecaf03340f50859f9155331d36759c4d1bd40e0793c3532e38c84b15de89aca4 not found: ID does not exist" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.896479 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.903129 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.914434 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:29 crc kubenswrapper[4747]: E0202 09:14:29.914807 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" containerName="kube-state-metrics" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.914820 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" containerName="kube-state-metrics" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.915017 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" containerName="kube-state-metrics" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.915605 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.917652 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.917896 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.932440 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:29 crc kubenswrapper[4747]: I0202 09:14:29.971972 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.049131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.049402 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.049503 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.049637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltlgn\" (UniqueName: \"kubernetes.io/projected/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-api-access-ltlgn\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.151057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.151119 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.151171 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 
09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.151217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltlgn\" (UniqueName: \"kubernetes.io/projected/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-api-access-ltlgn\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.156140 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.164754 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.166050 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.185659 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltlgn\" (UniqueName: \"kubernetes.io/projected/634a083b-144a-4785-b8d2-aa33c1b9c423-kube-api-access-ltlgn\") pod \"kube-state-metrics-0\" (UID: \"634a083b-144a-4785-b8d2-aa33c1b9c423\") " pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.246411 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.367580 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b43bea3c-d709-4f84-a052-a9b2500eaa8a" path="/var/lib/kubelet/pods/b43bea3c-d709-4f84-a052-a9b2500eaa8a/volumes" Feb 02 09:14:30 crc kubenswrapper[4747]: W0202 09:14:30.731874 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod634a083b_144a_4785_b8d2_aa33c1b9c423.slice/crio-2b78d40724b029c53044e12a11da4deba2a8b2d2ca4292a142affb31b2e810c0 WatchSource:0}: Error finding container 2b78d40724b029c53044e12a11da4deba2a8b2d2ca4292a142affb31b2e810c0: Status 404 returned error can't find the container with id 2b78d40724b029c53044e12a11da4deba2a8b2d2ca4292a142affb31b2e810c0 Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.732815 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.872108 4747 generic.go:334] "Generic (PLEG): container finished" podID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerID="cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a" exitCode=0 Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.872172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" event={"ID":"2fea7ef4-c513-4ded-93f4-689653dc8990","Type":"ContainerDied","Data":"cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a"} Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.872197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" event={"ID":"2fea7ef4-c513-4ded-93f4-689653dc8990","Type":"ContainerStarted","Data":"1873795ebd5197cf0282511b55c0ac7e618928f103ae556a74621a4cb6502432"} Feb 02 09:14:30 crc kubenswrapper[4747]: I0202 09:14:30.873871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"634a083b-144a-4785-b8d2-aa33c1b9c423","Type":"ContainerStarted","Data":"2b78d40724b029c53044e12a11da4deba2a8b2d2ca4292a142affb31b2e810c0"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.156231 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.156605 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-central-agent" containerID="cri-o://8b38a93253684a765f5fb4dabb6108a401bcb36e4970c8e244a2190219ed0a33" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.157157 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="proxy-httpd" containerID="cri-o://6cdb4b94ce2e2b7f0f7d30352ff2ddf611d3f4c68e40d95363f7ceb110274cc8" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.157223 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="sg-core" containerID="cri-o://15fb323e7e0868754ec715d8afbd9c2a7c9996ab93abd2b8f1402fbf6d10d250" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.157271 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-notification-agent" containerID="cri-o://db89047dae02a55848379f48c04278e431de56457150cd0068a6f37a09f8e660" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.237958 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.832037 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.885629 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" event={"ID":"2fea7ef4-c513-4ded-93f4-689653dc8990","Type":"ContainerStarted","Data":"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.885776 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888820 4747 generic.go:334] "Generic (PLEG): container finished" podID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerID="6cdb4b94ce2e2b7f0f7d30352ff2ddf611d3f4c68e40d95363f7ceb110274cc8" exitCode=0 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888856 4747 generic.go:334] "Generic (PLEG): container finished" podID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerID="15fb323e7e0868754ec715d8afbd9c2a7c9996ab93abd2b8f1402fbf6d10d250" exitCode=2 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888864 4747 generic.go:334] "Generic (PLEG): container finished" podID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerID="8b38a93253684a765f5fb4dabb6108a401bcb36e4970c8e244a2190219ed0a33" exitCode=0 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888915 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerDied","Data":"6cdb4b94ce2e2b7f0f7d30352ff2ddf611d3f4c68e40d95363f7ceb110274cc8"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888976 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerDied","Data":"15fb323e7e0868754ec715d8afbd9c2a7c9996ab93abd2b8f1402fbf6d10d250"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.888991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerDied","Data":"8b38a93253684a765f5fb4dabb6108a401bcb36e4970c8e244a2190219ed0a33"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.890969 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"634a083b-144a-4785-b8d2-aa33c1b9c423","Type":"ContainerStarted","Data":"b20cce9bb98a962bff6952cf4bb6fedae3758b84f5afc60d7785577e1aa028e2"} Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.891041 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.891241 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-log" containerID="cri-o://baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.891321 4747 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-api" containerID="cri-o://abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8" gracePeriod=30 Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.921470 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" podStartSLOduration=2.921452151 podStartE2EDuration="2.921452151s" podCreationTimestamp="2026-02-02 09:14:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:31.913024934 +0000 UTC m=+1084.457363387" watchObservedRunningTime="2026-02-02 09:14:31.921452151 +0000 UTC m=+1084.465790594" Feb 02 09:14:31 crc kubenswrapper[4747]: I0202 09:14:31.944997 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.474272336 podStartE2EDuration="2.944970426s" podCreationTimestamp="2026-02-02 09:14:29 +0000 UTC" firstStartedPulling="2026-02-02 09:14:30.734539649 +0000 UTC m=+1083.278878072" lastFinishedPulling="2026-02-02 09:14:31.205237729 +0000 UTC m=+1083.749576162" observedRunningTime="2026-02-02 09:14:31.931762186 +0000 UTC m=+1084.476100639" watchObservedRunningTime="2026-02-02 09:14:31.944970426 +0000 UTC m=+1084.489308879" Feb 02 09:14:32 crc kubenswrapper[4747]: I0202 09:14:32.901627 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc2d2533-b950-4276-917c-d33693cf6785" containerID="baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5" exitCode=143 Feb 02 09:14:32 crc kubenswrapper[4747]: I0202 09:14:32.901706 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerDied","Data":"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5"} Feb 02 09:14:34 crc kubenswrapper[4747]: I0202 09:14:34.927017 4747 generic.go:334] "Generic (PLEG): container finished" podID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerID="db89047dae02a55848379f48c04278e431de56457150cd0068a6f37a09f8e660" exitCode=0 Feb 02 09:14:34 crc kubenswrapper[4747]: I0202 09:14:34.927377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerDied","Data":"db89047dae02a55848379f48c04278e431de56457150cd0068a6f37a09f8e660"} Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.159338 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.275746 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.275911 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.275990 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.276051 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.276096 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.276129 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.276232 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhp5k\" (UniqueName: \"kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k\") pod \"23da7215-a6e3-49e7-9c46-c20e3963f70a\" (UID: \"23da7215-a6e3-49e7-9c46-c20e3963f70a\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.277479 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.277493 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.284798 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts" (OuterVolumeSpecName: "scripts") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.295199 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k" (OuterVolumeSpecName: "kube-api-access-dhp5k") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "kube-api-access-dhp5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.349514 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.381932 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.382276 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.382343 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23da7215-a6e3-49e7-9c46-c20e3963f70a-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.382415 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.382480 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhp5k\" (UniqueName: \"kubernetes.io/projected/23da7215-a6e3-49e7-9c46-c20e3963f70a-kube-api-access-dhp5k\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.400082 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.427738 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.429762 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data" (OuterVolumeSpecName: "config-data") pod "23da7215-a6e3-49e7-9c46-c20e3963f70a" (UID: "23da7215-a6e3-49e7-9c46-c20e3963f70a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.484670 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.484701 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23da7215-a6e3-49e7-9c46-c20e3963f70a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.585726 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs\") pod \"dc2d2533-b950-4276-917c-d33693cf6785\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.586042 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glvx2\" (UniqueName: \"kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2\") pod \"dc2d2533-b950-4276-917c-d33693cf6785\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.586106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle\") pod \"dc2d2533-b950-4276-917c-d33693cf6785\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.586141 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs" (OuterVolumeSpecName: "logs") pod "dc2d2533-b950-4276-917c-d33693cf6785" (UID: "dc2d2533-b950-4276-917c-d33693cf6785"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.586150 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data\") pod \"dc2d2533-b950-4276-917c-d33693cf6785\" (UID: \"dc2d2533-b950-4276-917c-d33693cf6785\") " Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.587216 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc2d2533-b950-4276-917c-d33693cf6785-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.592546 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2" (OuterVolumeSpecName: "kube-api-access-glvx2") pod "dc2d2533-b950-4276-917c-d33693cf6785" (UID: "dc2d2533-b950-4276-917c-d33693cf6785"). InnerVolumeSpecName "kube-api-access-glvx2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.621868 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data" (OuterVolumeSpecName: "config-data") pod "dc2d2533-b950-4276-917c-d33693cf6785" (UID: "dc2d2533-b950-4276-917c-d33693cf6785"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.622390 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc2d2533-b950-4276-917c-d33693cf6785" (UID: "dc2d2533-b950-4276-917c-d33693cf6785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.689794 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.689834 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc2d2533-b950-4276-917c-d33693cf6785-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.689847 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glvx2\" (UniqueName: \"kubernetes.io/projected/dc2d2533-b950-4276-917c-d33693cf6785-kube-api-access-glvx2\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.937333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23da7215-a6e3-49e7-9c46-c20e3963f70a","Type":"ContainerDied","Data":"f734e9860ba85ae497c175e62bbdd6c2c410b6c9adb4fbfed4627c2161059c42"} Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.937384 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.938136 4747 scope.go:117] "RemoveContainer" containerID="6cdb4b94ce2e2b7f0f7d30352ff2ddf611d3f4c68e40d95363f7ceb110274cc8" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.942747 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc2d2533-b950-4276-917c-d33693cf6785" containerID="abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8" exitCode=0 Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.942788 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.942793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerDied","Data":"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8"} Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.942955 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc2d2533-b950-4276-917c-d33693cf6785","Type":"ContainerDied","Data":"04ee13d985c1d0fae836500a292e319546fb9e213cbef3f4290338c2ddc3d8bb"} Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.964672 4747 scope.go:117] "RemoveContainer" containerID="15fb323e7e0868754ec715d8afbd9c2a7c9996ab93abd2b8f1402fbf6d10d250" Feb 02 09:14:35 crc kubenswrapper[4747]: I0202 09:14:35.985607 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.018282 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.018995 4747 scope.go:117] "RemoveContainer" containerID="db89047dae02a55848379f48c04278e431de56457150cd0068a6f37a09f8e660" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.036780 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.072481 4747 scope.go:117] "RemoveContainer" containerID="8b38a93253684a765f5fb4dabb6108a401bcb36e4970c8e244a2190219ed0a33" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.098877 4747 scope.go:117] "RemoveContainer" containerID="abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.103981 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114431 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114772 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="sg-core" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114785 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="sg-core" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114798 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="proxy-httpd" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114805 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="proxy-httpd" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114818 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-log" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114825 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-log" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114832 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-central-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114837 4747 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-central-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114847 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-api" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114852 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-api" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.114866 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-notification-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.114872 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-notification-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115040 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="sg-core" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115050 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="proxy-httpd" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115065 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-log" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115078 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2d2533-b950-4276-917c-d33693cf6785" containerName="nova-api-api" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115086 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-central-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.115096 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" containerName="ceilometer-notification-agent" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.117299 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.118505 4747 scope.go:117] "RemoveContainer" containerID="baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.119268 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.119390 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.119675 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.121759 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.123824 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.126496 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.126638 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.126748 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.132759 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.141475 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.149112 4747 scope.go:117] "RemoveContainer" containerID="abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.149564 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8\": container with ID starting with abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8 not found: ID does not exist" containerID="abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.149595 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8"} err="failed to get container status \"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8\": rpc error: code = NotFound desc = could not find container \"abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8\": container with ID starting with abef4a72aa37e1cacd44fb70adb8c4620028a48e5179f8b432de9ffdc35243c8 not found: ID does not exist" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.149615 4747 scope.go:117] "RemoveContainer" containerID="baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5" Feb 02 09:14:36 crc kubenswrapper[4747]: E0202 09:14:36.149915 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5\": container with ID starting with baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5 not found: ID does not exist" containerID="baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.150033 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5"} err="failed to get container status \"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5\": rpc error: code = NotFound desc = could not find container \"baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5\": container with ID starting with baca85182d5072c3161f26a82860dbfbcd13851d514e7c14e86a775481eb9fc5 not found: ID does not exist" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.238429 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 
09:14:36.257018 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.302805 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-log-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.302867 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.302955 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-run-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.302978 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ph2j\" (UniqueName: \"kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.303823 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-config-data\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.303993 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-scripts\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304271 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffrgz\" (UniqueName: \"kubernetes.io/projected/aaac5a6d-0200-4405-bf3f-d9e46177cc05-kube-api-access-ffrgz\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304338 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304366 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304382 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304400 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.304529 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.352384 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23da7215-a6e3-49e7-9c46-c20e3963f70a" path="/var/lib/kubelet/pods/23da7215-a6e3-49e7-9c46-c20e3963f70a/volumes" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.353888 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2d2533-b950-4276-917c-d33693cf6785" path="/var/lib/kubelet/pods/dc2d2533-b950-4276-917c-d33693cf6785/volumes" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-config-data\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407241 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-scripts\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407271 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407303 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffrgz\" (UniqueName: 
\"kubernetes.io/projected/aaac5a6d-0200-4405-bf3f-d9e46177cc05-kube-api-access-ffrgz\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407323 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407376 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407418 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407461 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407522 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-log-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407559 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407705 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-run-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.407726 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ph2j\" 
(UniqueName: \"kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.409661 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-log-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.410348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aaac5a6d-0200-4405-bf3f-d9e46177cc05-run-httpd\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.410723 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.414167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.414450 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.414844 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-config-data\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.414846 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.415525 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.415572 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.420782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.425591 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaac5a6d-0200-4405-bf3f-d9e46177cc05-scripts\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.432341 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.433426 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ph2j\" (UniqueName: \"kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j\") pod \"nova-api-0\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.440091 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffrgz\" (UniqueName: \"kubernetes.io/projected/aaac5a6d-0200-4405-bf3f-d9e46177cc05-kube-api-access-ffrgz\") pod \"ceilometer-0\" (UID: \"aaac5a6d-0200-4405-bf3f-d9e46177cc05\") " pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.442883 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.449887 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.940302 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.960719 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerStarted","Data":"508ce1073bd95865482392b8a785c012268ed89ccfdbf6aec2fde36940ee308e"} Feb 02 09:14:36 crc kubenswrapper[4747]: I0202 09:14:36.980812 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.026515 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.178769 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-k5dtm"] Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.181104 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.182669 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.184145 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.191805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-k5dtm"] Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.330457 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wb76\" (UniqueName: \"kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.330848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.331052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.331107 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.432768 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wb76\" (UniqueName: \"kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.432834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.433030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.433099 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.437619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.438893 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.439312 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.450409 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wb76\" (UniqueName: \"kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76\") pod \"nova-cell1-cell-mapping-k5dtm\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.512237 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.973038 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aaac5a6d-0200-4405-bf3f-d9e46177cc05","Type":"ContainerStarted","Data":"0288d2b77715bfe33a645d935eee0a1f1db9d1eb14cda2d20b141bb394d31373"} Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.973300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aaac5a6d-0200-4405-bf3f-d9e46177cc05","Type":"ContainerStarted","Data":"803d6fe7986b860933515f7a46c98562c35dca0990108e238af576ef4988a772"} Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.976929 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-k5dtm"] Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.982306 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerStarted","Data":"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c"} Feb 02 09:14:37 crc kubenswrapper[4747]: I0202 09:14:37.982611 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerStarted","Data":"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7"} Feb 02 09:14:38 crc kubenswrapper[4747]: I0202 09:14:38.015903 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.015883176 podStartE2EDuration="2.015883176s" podCreationTimestamp="2026-02-02 09:14:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:38.004423061 +0000 UTC m=+1090.548761494" watchObservedRunningTime="2026-02-02 09:14:38.015883176 +0000 UTC m=+1090.560221609" Feb 02 09:14:38 crc kubenswrapper[4747]: I0202 09:14:38.992193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aaac5a6d-0200-4405-bf3f-d9e46177cc05","Type":"ContainerStarted","Data":"ab34929c2d932c82b62c02ffb980cfd3d87f1f9852c1149afb2ed6c1ef9b7d49"} Feb 02 09:14:38 crc kubenswrapper[4747]: I0202 09:14:38.995479 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k5dtm" event={"ID":"c866e283-1f6a-4eb8-b03b-426ff2d875b9","Type":"ContainerStarted","Data":"4def1b4fc92e8fddb94c429cf9d4e4f8696f0ecc10b2388ab77580298e087e57"} Feb 02 09:14:38 crc kubenswrapper[4747]: I0202 09:14:38.995521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k5dtm" event={"ID":"c866e283-1f6a-4eb8-b03b-426ff2d875b9","Type":"ContainerStarted","Data":"f54dbde24460993500faa06785d8dc04fbbe3057bd6d173587407473f9b3c966"} Feb 02 09:14:39 crc kubenswrapper[4747]: I0202 09:14:39.013776 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-k5dtm" podStartSLOduration=2.013758041 podStartE2EDuration="2.013758041s" podCreationTimestamp="2026-02-02 09:14:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:39.011235366 +0000 UTC m=+1091.555573799" watchObservedRunningTime="2026-02-02 09:14:39.013758041 +0000 UTC m=+1091.558096474" Feb 02 09:14:39 crc kubenswrapper[4747]: I0202 09:14:39.403077 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:14:39 crc kubenswrapper[4747]: I0202 09:14:39.469177 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:14:39 crc kubenswrapper[4747]: I0202 09:14:39.469421 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="dnsmasq-dns" containerID="cri-o://815c772183b22ce0317b076c24ed835a1dc87da689f83d13ab6bb2efa2c3f06a" gracePeriod=10 Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.003991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aaac5a6d-0200-4405-bf3f-d9e46177cc05","Type":"ContainerStarted","Data":"7c5b91cd89a42ef08f80fcd8a36e2c947b8872ac52d822e83788b7cb2dd18a34"} Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.011627 4747 generic.go:334] "Generic (PLEG): container finished" podID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerID="815c772183b22ce0317b076c24ed835a1dc87da689f83d13ab6bb2efa2c3f06a" exitCode=0 Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.013012 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" event={"ID":"a32c4798-c1f9-4856-a256-7c28fafa04af","Type":"ContainerDied","Data":"815c772183b22ce0317b076c24ed835a1dc87da689f83d13ab6bb2efa2c3f06a"} Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.013050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" event={"ID":"a32c4798-c1f9-4856-a256-7c28fafa04af","Type":"ContainerDied","Data":"88ef9debc36544aacf72d443669c28d81dfefb9064d28cf06a0ed96d773ef5fd"} Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.013064 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88ef9debc36544aacf72d443669c28d81dfefb9064d28cf06a0ed96d773ef5fd" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.030693 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.102817 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.102993 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.103573 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4xdn\" (UniqueName: \"kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.103608 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.103659 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.103728 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb\") pod \"a32c4798-c1f9-4856-a256-7c28fafa04af\" (UID: \"a32c4798-c1f9-4856-a256-7c28fafa04af\") " Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.113168 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn" (OuterVolumeSpecName: "kube-api-access-p4xdn") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "kube-api-access-p4xdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.154856 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.181742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.182310 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.184325 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config" (OuterVolumeSpecName: "config") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.190436 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a32c4798-c1f9-4856-a256-7c28fafa04af" (UID: "a32c4798-c1f9-4856-a256-7c28fafa04af"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206479 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206508 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4xdn\" (UniqueName: \"kubernetes.io/projected/a32c4798-c1f9-4856-a256-7c28fafa04af-kube-api-access-p4xdn\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206526 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206539 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206546 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.206554 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a32c4798-c1f9-4856-a256-7c28fafa04af-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:40 crc kubenswrapper[4747]: I0202 09:14:40.262151 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 09:14:41 crc kubenswrapper[4747]: I0202 09:14:41.018529 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" Feb 02 09:14:41 crc kubenswrapper[4747]: I0202 09:14:41.044625 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:14:41 crc kubenswrapper[4747]: I0202 09:14:41.054046 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-65dxk"] Feb 02 09:14:42 crc kubenswrapper[4747]: I0202 09:14:42.029746 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aaac5a6d-0200-4405-bf3f-d9e46177cc05","Type":"ContainerStarted","Data":"3b5b76b5414dece702fc3eb3a9d9be89d4ba16a851b0d0154d86a16fcf4dc66b"} Feb 02 09:14:42 crc kubenswrapper[4747]: I0202 09:14:42.030455 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 09:14:42 crc kubenswrapper[4747]: I0202 09:14:42.060651 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.677884603 podStartE2EDuration="7.060632744s" podCreationTimestamp="2026-02-02 09:14:35 +0000 UTC" firstStartedPulling="2026-02-02 09:14:37.058494705 +0000 UTC m=+1089.602833138" lastFinishedPulling="2026-02-02 09:14:41.441242846 +0000 UTC m=+1093.985581279" observedRunningTime="2026-02-02 09:14:42.054845625 +0000 UTC m=+1094.599184078" watchObservedRunningTime="2026-02-02 09:14:42.060632744 +0000 UTC m=+1094.604971177" Feb 02 09:14:42 crc kubenswrapper[4747]: I0202 09:14:42.350009 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" path="/var/lib/kubelet/pods/a32c4798-c1f9-4856-a256-7c28fafa04af/volumes" Feb 02 09:14:43 crc kubenswrapper[4747]: I0202 09:14:43.041503 4747 generic.go:334] "Generic (PLEG): container finished" podID="c866e283-1f6a-4eb8-b03b-426ff2d875b9" containerID="4def1b4fc92e8fddb94c429cf9d4e4f8696f0ecc10b2388ab77580298e087e57" exitCode=0 Feb 02 09:14:43 crc kubenswrapper[4747]: I0202 09:14:43.041588 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k5dtm" event={"ID":"c866e283-1f6a-4eb8-b03b-426ff2d875b9","Type":"ContainerDied","Data":"4def1b4fc92e8fddb94c429cf9d4e4f8696f0ecc10b2388ab77580298e087e57"} Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.393481 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.498756 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data\") pod \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.499020 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wb76\" (UniqueName: \"kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76\") pod \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.499052 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle\") pod \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.499260 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts\") pod \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\" (UID: \"c866e283-1f6a-4eb8-b03b-426ff2d875b9\") " Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.506057 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts" (OuterVolumeSpecName: "scripts") pod "c866e283-1f6a-4eb8-b03b-426ff2d875b9" (UID: "c866e283-1f6a-4eb8-b03b-426ff2d875b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.506682 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76" (OuterVolumeSpecName: "kube-api-access-9wb76") pod "c866e283-1f6a-4eb8-b03b-426ff2d875b9" (UID: "c866e283-1f6a-4eb8-b03b-426ff2d875b9"). InnerVolumeSpecName "kube-api-access-9wb76". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.527498 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c866e283-1f6a-4eb8-b03b-426ff2d875b9" (UID: "c866e283-1f6a-4eb8-b03b-426ff2d875b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.538842 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data" (OuterVolumeSpecName: "config-data") pod "c866e283-1f6a-4eb8-b03b-426ff2d875b9" (UID: "c866e283-1f6a-4eb8-b03b-426ff2d875b9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.601206 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.601246 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.601259 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wb76\" (UniqueName: \"kubernetes.io/projected/c866e283-1f6a-4eb8-b03b-426ff2d875b9-kube-api-access-9wb76\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.601273 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c866e283-1f6a-4eb8-b03b-426ff2d875b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:44 crc kubenswrapper[4747]: I0202 09:14:44.938298 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-845d6d6f59-65dxk" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.193:5353: i/o timeout" Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.066547 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k5dtm" event={"ID":"c866e283-1f6a-4eb8-b03b-426ff2d875b9","Type":"ContainerDied","Data":"f54dbde24460993500faa06785d8dc04fbbe3057bd6d173587407473f9b3c966"} Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.066613 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f54dbde24460993500faa06785d8dc04fbbe3057bd6d173587407473f9b3c966" Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.066739 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k5dtm" Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.252342 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.253023 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-log" containerID="cri-o://84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" gracePeriod=30 Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.253127 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-api" containerID="cri-o://842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" gracePeriod=30 Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.355535 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.355860 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" containerID="cri-o://d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925" gracePeriod=30 Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.355996 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" containerID="cri-o://2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723" gracePeriod=30 Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.368960 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.369216 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="dae66685-4314-4edd-9446-dcd2fe84f777" containerName="nova-scheduler-scheduler" containerID="cri-o://981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647" gracePeriod=30 Feb 02 09:14:45 crc kubenswrapper[4747]: I0202 09:14:45.830460 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.029841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.030602 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.030847 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.031103 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.031332 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs" (OuterVolumeSpecName: "logs") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.031588 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ph2j\" (UniqueName: \"kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.031853 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs\") pod \"325ebd33-a624-410a-bde9-cac6f02e9ddc\" (UID: \"325ebd33-a624-410a-bde9-cac6f02e9ddc\") " Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.032846 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/325ebd33-a624-410a-bde9-cac6f02e9ddc-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.037038 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j" (OuterVolumeSpecName: "kube-api-access-7ph2j") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "kube-api-access-7ph2j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.057979 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.063737 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data" (OuterVolumeSpecName: "config-data") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.079422 4747 generic.go:334] "Generic (PLEG): container finished" podID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerID="d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925" exitCode=143 Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.079498 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerDied","Data":"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925"} Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085397 4747 generic.go:334] "Generic (PLEG): container finished" podID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerID="842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" exitCode=0 Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085438 4747 generic.go:334] "Generic (PLEG): container finished" podID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerID="84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" exitCode=143 Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085465 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerDied","Data":"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c"} Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerDied","Data":"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7"} Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085509 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"325ebd33-a624-410a-bde9-cac6f02e9ddc","Type":"ContainerDied","Data":"508ce1073bd95865482392b8a785c012268ed89ccfdbf6aec2fde36940ee308e"} Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085527 4747 scope.go:117] "RemoveContainer" containerID="842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.085691 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.091591 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.101848 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "325ebd33-a624-410a-bde9-cac6f02e9ddc" (UID: "325ebd33-a624-410a-bde9-cac6f02e9ddc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.127234 4747 scope.go:117] "RemoveContainer" containerID="84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.133801 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ph2j\" (UniqueName: \"kubernetes.io/projected/325ebd33-a624-410a-bde9-cac6f02e9ddc-kube-api-access-7ph2j\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.133830 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.133840 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.133848 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.133859 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/325ebd33-a624-410a-bde9-cac6f02e9ddc-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.156815 4747 scope.go:117] "RemoveContainer" containerID="842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.157263 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c\": container with ID starting with 842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c not found: ID does not exist" containerID="842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157293 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c"} err="failed to get container status \"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c\": rpc error: code = NotFound desc = could not find container 
\"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c\": container with ID starting with 842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c not found: ID does not exist" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157312 4747 scope.go:117] "RemoveContainer" containerID="84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.157675 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7\": container with ID starting with 84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7 not found: ID does not exist" containerID="84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157698 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7"} err="failed to get container status \"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7\": rpc error: code = NotFound desc = could not find container \"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7\": container with ID starting with 84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7 not found: ID does not exist" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157712 4747 scope.go:117] "RemoveContainer" containerID="842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157909 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c"} err="failed to get container status \"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c\": rpc error: code = NotFound desc = could not find container \"842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c\": container with ID starting with 842d17efb81b22f1146e09c2b46dd83cbba4cb93517211add7092e9d680c8b7c not found: ID does not exist" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.157927 4747 scope.go:117] "RemoveContainer" containerID="84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.158382 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7"} err="failed to get container status \"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7\": rpc error: code = NotFound desc = could not find container \"84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7\": container with ID starting with 84a3fa34c88d901eb010ae16dfbedf0227e3192826a67b124c1196c5921888a7 not found: ID does not exist" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.465059 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.475765 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485260 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.485621 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="init" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485637 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="init" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.485654 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-log" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485660 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-log" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.485667 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-api" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485673 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-api" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.485694 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c866e283-1f6a-4eb8-b03b-426ff2d875b9" containerName="nova-manage" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485700 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="c866e283-1f6a-4eb8-b03b-426ff2d875b9" containerName="nova-manage" Feb 02 09:14:46 crc kubenswrapper[4747]: E0202 09:14:46.485707 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="dnsmasq-dns" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485713 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="dnsmasq-dns" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485884 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32c4798-c1f9-4856-a256-7c28fafa04af" containerName="dnsmasq-dns" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485897 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="c866e283-1f6a-4eb8-b03b-426ff2d875b9" containerName="nova-manage" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485912 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-log" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.485923 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" containerName="nova-api-api" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.486789 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.489061 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.489985 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.490281 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.502576 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644051 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644092 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644118 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-config-data\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644156 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-public-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644205 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m84l\" (UniqueName: \"kubernetes.io/projected/3909e3d2-a56c-4a5e-ac77-d65f07d595de-kube-api-access-6m84l\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.644284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3909e3d2-a56c-4a5e-ac77-d65f07d595de-logs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.745997 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.746059 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-config-data\") pod \"nova-api-0\" (UID: 
\"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.746101 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-public-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.746127 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m84l\" (UniqueName: \"kubernetes.io/projected/3909e3d2-a56c-4a5e-ac77-d65f07d595de-kube-api-access-6m84l\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.747071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3909e3d2-a56c-4a5e-ac77-d65f07d595de-logs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.747187 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.747407 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3909e3d2-a56c-4a5e-ac77-d65f07d595de-logs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.751223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.751348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-public-tls-certs\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.753100 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.753675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3909e3d2-a56c-4a5e-ac77-d65f07d595de-config-data\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.767303 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m84l\" (UniqueName: \"kubernetes.io/projected/3909e3d2-a56c-4a5e-ac77-d65f07d595de-kube-api-access-6m84l\") pod \"nova-api-0\" (UID: \"3909e3d2-a56c-4a5e-ac77-d65f07d595de\") " pod="openstack/nova-api-0" Feb 
02 09:14:46 crc kubenswrapper[4747]: I0202 09:14:46.827240 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.028012 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.057139 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-845wr\" (UniqueName: \"kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr\") pod \"dae66685-4314-4edd-9446-dcd2fe84f777\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.057265 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle\") pod \"dae66685-4314-4edd-9446-dcd2fe84f777\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.057363 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data\") pod \"dae66685-4314-4edd-9446-dcd2fe84f777\" (UID: \"dae66685-4314-4edd-9446-dcd2fe84f777\") " Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.063816 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr" (OuterVolumeSpecName: "kube-api-access-845wr") pod "dae66685-4314-4edd-9446-dcd2fe84f777" (UID: "dae66685-4314-4edd-9446-dcd2fe84f777"). InnerVolumeSpecName "kube-api-access-845wr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.091465 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data" (OuterVolumeSpecName: "config-data") pod "dae66685-4314-4edd-9446-dcd2fe84f777" (UID: "dae66685-4314-4edd-9446-dcd2fe84f777"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.100105 4747 generic.go:334] "Generic (PLEG): container finished" podID="dae66685-4314-4edd-9446-dcd2fe84f777" containerID="981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647" exitCode=0 Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.100162 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.100190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dae66685-4314-4edd-9446-dcd2fe84f777","Type":"ContainerDied","Data":"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647"} Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.100616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"dae66685-4314-4edd-9446-dcd2fe84f777","Type":"ContainerDied","Data":"430b01b30ea94c1bab0ffcb6b32499451d35f4bcde1ac17c7976437b17e57695"} Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.100655 4747 scope.go:117] "RemoveContainer" containerID="981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.104771 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dae66685-4314-4edd-9446-dcd2fe84f777" (UID: "dae66685-4314-4edd-9446-dcd2fe84f777"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.124787 4747 scope.go:117] "RemoveContainer" containerID="981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647" Feb 02 09:14:47 crc kubenswrapper[4747]: E0202 09:14:47.125337 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647\": container with ID starting with 981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647 not found: ID does not exist" containerID="981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.125384 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647"} err="failed to get container status \"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647\": rpc error: code = NotFound desc = could not find container \"981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647\": container with ID starting with 981a751ee65a4efc1dfb56f82e0dcb8e37e221f0da5837f338c836f928f91647 not found: ID does not exist" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.159466 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.159506 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dae66685-4314-4edd-9446-dcd2fe84f777-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.159517 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-845wr\" (UniqueName: \"kubernetes.io/projected/dae66685-4314-4edd-9446-dcd2fe84f777-kube-api-access-845wr\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.332272 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 09:14:47 crc kubenswrapper[4747]: W0202 09:14:47.335776 4747 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3909e3d2_a56c_4a5e_ac77_d65f07d595de.slice/crio-f503c1a11d86de97a3d283ea2f5bfe8a614ec873f8eaf4c0465195b69eaa9e0a WatchSource:0}: Error finding container f503c1a11d86de97a3d283ea2f5bfe8a614ec873f8eaf4c0465195b69eaa9e0a: Status 404 returned error can't find the container with id f503c1a11d86de97a3d283ea2f5bfe8a614ec873f8eaf4c0465195b69eaa9e0a Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.485816 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.494797 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.516315 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:47 crc kubenswrapper[4747]: E0202 09:14:47.516793 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae66685-4314-4edd-9446-dcd2fe84f777" containerName="nova-scheduler-scheduler" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.516813 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae66685-4314-4edd-9446-dcd2fe84f777" containerName="nova-scheduler-scheduler" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.517130 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae66685-4314-4edd-9446-dcd2fe84f777" containerName="nova-scheduler-scheduler" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.518030 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.528397 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.551177 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.569378 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr6pw\" (UniqueName: \"kubernetes.io/projected/5053c6b4-d732-46f3-a4ee-29fad06b06fb-kube-api-access-rr6pw\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.569571 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.569923 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-config-data\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.672554 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr6pw\" (UniqueName: \"kubernetes.io/projected/5053c6b4-d732-46f3-a4ee-29fad06b06fb-kube-api-access-rr6pw\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") 
" pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.672967 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.673026 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-config-data\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.676411 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-config-data\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.677260 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5053c6b4-d732-46f3-a4ee-29fad06b06fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.691746 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr6pw\" (UniqueName: \"kubernetes.io/projected/5053c6b4-d732-46f3-a4ee-29fad06b06fb-kube-api-access-rr6pw\") pod \"nova-scheduler-0\" (UID: \"5053c6b4-d732-46f3-a4ee-29fad06b06fb\") " pod="openstack/nova-scheduler-0" Feb 02 09:14:47 crc kubenswrapper[4747]: I0202 09:14:47.846579 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.113522 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3909e3d2-a56c-4a5e-ac77-d65f07d595de","Type":"ContainerStarted","Data":"b0138826e5181b18dae345d1e7b21daa3522c1d6391dbcccba1f81f44a7c460a"} Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.113860 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3909e3d2-a56c-4a5e-ac77-d65f07d595de","Type":"ContainerStarted","Data":"0bcf340ea98c79bd2a6e4203e53e586494dfabd723b827745aa35c0e68c433d7"} Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.113876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3909e3d2-a56c-4a5e-ac77-d65f07d595de","Type":"ContainerStarted","Data":"f503c1a11d86de97a3d283ea2f5bfe8a614ec873f8eaf4c0465195b69eaa9e0a"} Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.130820 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.130795735 podStartE2EDuration="2.130795735s" podCreationTimestamp="2026-02-02 09:14:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:48.128218748 +0000 UTC m=+1100.672557201" watchObservedRunningTime="2026-02-02 09:14:48.130795735 +0000 UTC m=+1100.675134168" Feb 02 09:14:48 crc kubenswrapper[4747]: W0202 09:14:48.311151 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5053c6b4_d732_46f3_a4ee_29fad06b06fb.slice/crio-0b26548d0bb927b7b370a9ccac4cf89f4354492069ca450ed94a3ca08586ec7c WatchSource:0}: Error finding container 0b26548d0bb927b7b370a9ccac4cf89f4354492069ca450ed94a3ca08586ec7c: Status 404 returned error can't find the container with id 0b26548d0bb927b7b370a9ccac4cf89f4354492069ca450ed94a3ca08586ec7c Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.313181 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.363627 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="325ebd33-a624-410a-bde9-cac6f02e9ddc" path="/var/lib/kubelet/pods/325ebd33-a624-410a-bde9-cac6f02e9ddc/volumes" Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.364762 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dae66685-4314-4edd-9446-dcd2fe84f777" path="/var/lib/kubelet/pods/dae66685-4314-4edd-9446-dcd2fe84f777/volumes" Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.508463 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:59816->10.217.0.197:8775: read: connection reset by peer" Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.508977 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:59818->10.217.0.197:8775: read: connection reset by peer" Feb 02 09:14:48 crc kubenswrapper[4747]: I0202 09:14:48.904362 4747 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.002744 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle\") pod \"902a6326-e93d-4f54-b760-62f58b36f0f7\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.002902 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data\") pod \"902a6326-e93d-4f54-b760-62f58b36f0f7\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.002973 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs\") pod \"902a6326-e93d-4f54-b760-62f58b36f0f7\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.003137 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5q8d6\" (UniqueName: \"kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6\") pod \"902a6326-e93d-4f54-b760-62f58b36f0f7\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.003218 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs\") pod \"902a6326-e93d-4f54-b760-62f58b36f0f7\" (UID: \"902a6326-e93d-4f54-b760-62f58b36f0f7\") " Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.003696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs" (OuterVolumeSpecName: "logs") pod "902a6326-e93d-4f54-b760-62f58b36f0f7" (UID: "902a6326-e93d-4f54-b760-62f58b36f0f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.010221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6" (OuterVolumeSpecName: "kube-api-access-5q8d6") pod "902a6326-e93d-4f54-b760-62f58b36f0f7" (UID: "902a6326-e93d-4f54-b760-62f58b36f0f7"). InnerVolumeSpecName "kube-api-access-5q8d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.030798 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "902a6326-e93d-4f54-b760-62f58b36f0f7" (UID: "902a6326-e93d-4f54-b760-62f58b36f0f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.065042 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data" (OuterVolumeSpecName: "config-data") pod "902a6326-e93d-4f54-b760-62f58b36f0f7" (UID: "902a6326-e93d-4f54-b760-62f58b36f0f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.068356 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "902a6326-e93d-4f54-b760-62f58b36f0f7" (UID: "902a6326-e93d-4f54-b760-62f58b36f0f7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.105602 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5q8d6\" (UniqueName: \"kubernetes.io/projected/902a6326-e93d-4f54-b760-62f58b36f0f7-kube-api-access-5q8d6\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.105637 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.105652 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.105663 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/902a6326-e93d-4f54-b760-62f58b36f0f7-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.105690 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/902a6326-e93d-4f54-b760-62f58b36f0f7-logs\") on node \"crc\" DevicePath \"\"" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.123548 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5053c6b4-d732-46f3-a4ee-29fad06b06fb","Type":"ContainerStarted","Data":"81f1668ccc503678763dbec8791090fca4ade8d32239d33c6f3eda594154b325"} Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.123598 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5053c6b4-d732-46f3-a4ee-29fad06b06fb","Type":"ContainerStarted","Data":"0b26548d0bb927b7b370a9ccac4cf89f4354492069ca450ed94a3ca08586ec7c"} Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.125743 4747 generic.go:334] "Generic (PLEG): container finished" podID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerID="2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723" exitCode=0 Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.126218 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.130090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerDied","Data":"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723"} Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.130126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"902a6326-e93d-4f54-b760-62f58b36f0f7","Type":"ContainerDied","Data":"bc795209b9140c4604c2da9754565a5144d7b0c842d0dd57422b1512a01ed250"} Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.130144 4747 scope.go:117] "RemoveContainer" containerID="2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.148174 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.148157091 podStartE2EDuration="2.148157091s" podCreationTimestamp="2026-02-02 09:14:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:49.145170164 +0000 UTC m=+1101.689508627" watchObservedRunningTime="2026-02-02 09:14:49.148157091 +0000 UTC m=+1101.692495524" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.161976 4747 scope.go:117] "RemoveContainer" containerID="d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.178477 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.194436 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.198072 4747 scope.go:117] "RemoveContainer" containerID="2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723" Feb 02 09:14:49 crc kubenswrapper[4747]: E0202 09:14:49.198339 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723\": container with ID starting with 2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723 not found: ID does not exist" containerID="2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.198365 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723"} err="failed to get container status \"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723\": rpc error: code = NotFound desc = could not find container \"2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723\": container with ID starting with 2ba72b96a317c5c3f70d39c6e684a295814e5a432e09b243781ce3eb97832723 not found: ID does not exist" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.198383 4747 scope.go:117] "RemoveContainer" containerID="d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925" Feb 02 09:14:49 crc kubenswrapper[4747]: E0202 09:14:49.198682 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925\": 
container with ID starting with d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925 not found: ID does not exist" containerID="d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.198703 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925"} err="failed to get container status \"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925\": rpc error: code = NotFound desc = could not find container \"d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925\": container with ID starting with d8631c013c02bb0401804bf1d81249388323d51a552af9522c5809e343110925 not found: ID does not exist" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.204133 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:49 crc kubenswrapper[4747]: E0202 09:14:49.204775 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.204798 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" Feb 02 09:14:49 crc kubenswrapper[4747]: E0202 09:14:49.204843 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.204855 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.205214 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-log" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.205252 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" containerName="nova-metadata-metadata" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.207358 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.210590 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.210686 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.215604 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.307901 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-config-data\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.308200 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.308321 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.308354 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-logs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.308411 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx59w\" (UniqueName: \"kubernetes.io/projected/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-kube-api-access-zx59w\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.410164 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-config-data\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.410215 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.410307 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " 
pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.410353 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-logs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.410391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx59w\" (UniqueName: \"kubernetes.io/projected/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-kube-api-access-zx59w\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.411730 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-logs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.415266 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.416100 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.416973 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-config-data\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.437837 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx59w\" (UniqueName: \"kubernetes.io/projected/d6c1b359-f618-48a3-9ef9-f13da6cc85a9-kube-api-access-zx59w\") pod \"nova-metadata-0\" (UID: \"d6c1b359-f618-48a3-9ef9-f13da6cc85a9\") " pod="openstack/nova-metadata-0" Feb 02 09:14:49 crc kubenswrapper[4747]: I0202 09:14:49.561723 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 09:14:50 crc kubenswrapper[4747]: I0202 09:14:50.029623 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 09:14:50 crc kubenswrapper[4747]: I0202 09:14:50.143201 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d6c1b359-f618-48a3-9ef9-f13da6cc85a9","Type":"ContainerStarted","Data":"2e33e3ef1c7bd74416d1e15db91fe6641a5f2a2fb193729cf9219db17265079f"} Feb 02 09:14:50 crc kubenswrapper[4747]: I0202 09:14:50.371376 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="902a6326-e93d-4f54-b760-62f58b36f0f7" path="/var/lib/kubelet/pods/902a6326-e93d-4f54-b760-62f58b36f0f7/volumes" Feb 02 09:14:50 crc kubenswrapper[4747]: I0202 09:14:50.518566 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:14:50 crc kubenswrapper[4747]: I0202 09:14:50.518638 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:14:51 crc kubenswrapper[4747]: I0202 09:14:51.152162 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d6c1b359-f618-48a3-9ef9-f13da6cc85a9","Type":"ContainerStarted","Data":"73421d086611336af7bce0273841a5300574a94552507c0901359bcc9add7a17"} Feb 02 09:14:51 crc kubenswrapper[4747]: I0202 09:14:51.152211 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d6c1b359-f618-48a3-9ef9-f13da6cc85a9","Type":"ContainerStarted","Data":"c794ca4b08ac5848ee561e46a91ed483dfc064335e983b3507a2e729105d60c6"} Feb 02 09:14:52 crc kubenswrapper[4747]: I0202 09:14:52.848321 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 09:14:54 crc kubenswrapper[4747]: I0202 09:14:54.561781 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:54 crc kubenswrapper[4747]: I0202 09:14:54.562247 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 09:14:56 crc kubenswrapper[4747]: I0202 09:14:56.828010 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:14:56 crc kubenswrapper[4747]: I0202 09:14:56.828380 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 09:14:57 crc kubenswrapper[4747]: I0202 09:14:57.844092 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3909e3d2-a56c-4a5e-ac77-d65f07d595de" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:57 crc kubenswrapper[4747]: I0202 09:14:57.844086 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3909e3d2-a56c-4a5e-ac77-d65f07d595de" containerName="nova-api-api" probeResult="failure" output="Get 
\"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 09:14:57 crc kubenswrapper[4747]: I0202 09:14:57.847666 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 09:14:57 crc kubenswrapper[4747]: I0202 09:14:57.891635 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 09:14:57 crc kubenswrapper[4747]: I0202 09:14:57.917348 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=8.917326433 podStartE2EDuration="8.917326433s" podCreationTimestamp="2026-02-02 09:14:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:14:51.175050811 +0000 UTC m=+1103.719389274" watchObservedRunningTime="2026-02-02 09:14:57.917326433 +0000 UTC m=+1110.461664876" Feb 02 09:14:58 crc kubenswrapper[4747]: I0202 09:14:58.282091 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 09:14:59 crc kubenswrapper[4747]: I0202 09:14:59.562428 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 09:14:59 crc kubenswrapper[4747]: I0202 09:14:59.562468 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.178313 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq"] Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.179685 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.182167 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.182688 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.188492 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq"] Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.344134 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.344247 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpdwx\" (UniqueName: \"kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.344289 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.446039 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.446385 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpdwx\" (UniqueName: \"kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.446453 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.447376 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume\") pod 
\"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.459841 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.479521 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpdwx\" (UniqueName: \"kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx\") pod \"collect-profiles-29500395-kwddq\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.509868 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.573118 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d6c1b359-f618-48a3-9ef9-f13da6cc85a9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.573170 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d6c1b359-f618-48a3-9ef9-f13da6cc85a9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 09:15:00 crc kubenswrapper[4747]: I0202 09:15:00.968617 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq"] Feb 02 09:15:01 crc kubenswrapper[4747]: I0202 09:15:01.269148 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" event={"ID":"2f644a51-ebfb-406b-8303-9fe50eb5f53e","Type":"ContainerStarted","Data":"ac144fe884ccdfef29a23fbcf40e28edea88c3188c8b06526e1ba16fdba28c1d"} Feb 02 09:15:01 crc kubenswrapper[4747]: I0202 09:15:01.269494 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" event={"ID":"2f644a51-ebfb-406b-8303-9fe50eb5f53e","Type":"ContainerStarted","Data":"de113760618ac6a9aa75bf5e806392ad7482a5f29e1c3c55c30de61f7b80d5aa"} Feb 02 09:15:01 crc kubenswrapper[4747]: I0202 09:15:01.291148 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" podStartSLOduration=1.2911269029999999 podStartE2EDuration="1.291126903s" podCreationTimestamp="2026-02-02 09:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:15:01.282601834 +0000 UTC m=+1113.826940277" watchObservedRunningTime="2026-02-02 09:15:01.291126903 +0000 UTC m=+1113.835465346" Feb 02 09:15:02 crc kubenswrapper[4747]: I0202 09:15:02.278475 4747 generic.go:334] "Generic (PLEG): 
container finished" podID="2f644a51-ebfb-406b-8303-9fe50eb5f53e" containerID="ac144fe884ccdfef29a23fbcf40e28edea88c3188c8b06526e1ba16fdba28c1d" exitCode=0 Feb 02 09:15:02 crc kubenswrapper[4747]: I0202 09:15:02.278545 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" event={"ID":"2f644a51-ebfb-406b-8303-9fe50eb5f53e","Type":"ContainerDied","Data":"ac144fe884ccdfef29a23fbcf40e28edea88c3188c8b06526e1ba16fdba28c1d"} Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.607356 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.808371 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume\") pod \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.808805 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume\") pod \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.809240 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpdwx\" (UniqueName: \"kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx\") pod \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\" (UID: \"2f644a51-ebfb-406b-8303-9fe50eb5f53e\") " Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.810114 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume" (OuterVolumeSpecName: "config-volume") pod "2f644a51-ebfb-406b-8303-9fe50eb5f53e" (UID: "2f644a51-ebfb-406b-8303-9fe50eb5f53e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.814342 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx" (OuterVolumeSpecName: "kube-api-access-jpdwx") pod "2f644a51-ebfb-406b-8303-9fe50eb5f53e" (UID: "2f644a51-ebfb-406b-8303-9fe50eb5f53e"). InnerVolumeSpecName "kube-api-access-jpdwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.817213 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2f644a51-ebfb-406b-8303-9fe50eb5f53e" (UID: "2f644a51-ebfb-406b-8303-9fe50eb5f53e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.912378 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpdwx\" (UniqueName: \"kubernetes.io/projected/2f644a51-ebfb-406b-8303-9fe50eb5f53e-kube-api-access-jpdwx\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.912440 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f644a51-ebfb-406b-8303-9fe50eb5f53e-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:03 crc kubenswrapper[4747]: I0202 09:15:03.912461 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f644a51-ebfb-406b-8303-9fe50eb5f53e-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:04 crc kubenswrapper[4747]: I0202 09:15:04.300810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" event={"ID":"2f644a51-ebfb-406b-8303-9fe50eb5f53e","Type":"ContainerDied","Data":"de113760618ac6a9aa75bf5e806392ad7482a5f29e1c3c55c30de61f7b80d5aa"} Feb 02 09:15:04 crc kubenswrapper[4747]: I0202 09:15:04.300852 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de113760618ac6a9aa75bf5e806392ad7482a5f29e1c3c55c30de61f7b80d5aa" Feb 02 09:15:04 crc kubenswrapper[4747]: I0202 09:15:04.300894 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500395-kwddq" Feb 02 09:15:06 crc kubenswrapper[4747]: I0202 09:15:06.451451 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 09:15:06 crc kubenswrapper[4747]: I0202 09:15:06.833708 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 09:15:06 crc kubenswrapper[4747]: I0202 09:15:06.834723 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 09:15:06 crc kubenswrapper[4747]: I0202 09:15:06.835427 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 09:15:06 crc kubenswrapper[4747]: I0202 09:15:06.840498 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 09:15:07 crc kubenswrapper[4747]: I0202 09:15:07.325553 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 09:15:07 crc kubenswrapper[4747]: I0202 09:15:07.341285 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 09:15:09 crc kubenswrapper[4747]: I0202 09:15:09.567172 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 09:15:09 crc kubenswrapper[4747]: I0202 09:15:09.570434 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 09:15:09 crc kubenswrapper[4747]: I0202 09:15:09.574160 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 09:15:10 crc kubenswrapper[4747]: I0202 09:15:10.362142 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 09:15:17 crc kubenswrapper[4747]: I0202 09:15:17.842883 4747 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:18 crc kubenswrapper[4747]: I0202 09:15:18.533085 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:20 crc kubenswrapper[4747]: I0202 09:15:20.518966 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:15:20 crc kubenswrapper[4747]: I0202 09:15:20.519297 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:15:21 crc kubenswrapper[4747]: I0202 09:15:21.603693 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="rabbitmq" containerID="cri-o://b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130" gracePeriod=604797 Feb 02 09:15:23 crc kubenswrapper[4747]: I0202 09:15:23.539782 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="rabbitmq" containerID="cri-o://230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33" gracePeriod=604795 Feb 02 09:15:26 crc kubenswrapper[4747]: I0202 09:15:26.662671 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5671: connect: connection refused" Feb 02 09:15:26 crc kubenswrapper[4747]: I0202 09:15:26.949376 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.96:5671: connect: connection refused" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.163406 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257395 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257491 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257586 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257651 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257709 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257769 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257830 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.257913 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.258040 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blzlq\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.258095 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: 
\"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.258160 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info\") pod \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\" (UID: \"94de1c50-3e73-4d3c-9f71-194b0fd4f005\") " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.259212 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.265251 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.266419 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.269432 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info" (OuterVolumeSpecName: "pod-info") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.269436 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.273532 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq" (OuterVolumeSpecName: "kube-api-access-blzlq") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "kube-api-access-blzlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.274170 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.286273 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.353306 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf" (OuterVolumeSpecName: "server-conf") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.360807 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blzlq\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-kube-api-access-blzlq\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.360841 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.360851 4747 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/94de1c50-3e73-4d3c-9f71-194b0fd4f005-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.363874 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data" (OuterVolumeSpecName: "config-data") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364044 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364065 4747 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/94de1c50-3e73-4d3c-9f71-194b0fd4f005-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364078 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364087 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364096 4747 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.364104 4747 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.394129 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.438996 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "94de1c50-3e73-4d3c-9f71-194b0fd4f005" (UID: "94de1c50-3e73-4d3c-9f71-194b0fd4f005"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.466086 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/94de1c50-3e73-4d3c-9f71-194b0fd4f005-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.466205 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.466222 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/94de1c50-3e73-4d3c-9f71-194b0fd4f005-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.526583 4747 generic.go:334] "Generic (PLEG): container finished" podID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerID="b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130" exitCode=0 Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.526625 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerDied","Data":"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130"} Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.526649 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"94de1c50-3e73-4d3c-9f71-194b0fd4f005","Type":"ContainerDied","Data":"f801ccd86058b8f875a268bb3f6aa42d28d8034d61e41f167735d9f24ce5af0d"} Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.526664 4747 scope.go:117] "RemoveContainer" containerID="b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.526779 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.561754 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.564021 4747 scope.go:117] "RemoveContainer" containerID="8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.577048 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618116 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:28 crc kubenswrapper[4747]: E0202 09:15:28.618485 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="setup-container" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618502 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="setup-container" Feb 02 09:15:28 crc kubenswrapper[4747]: E0202 09:15:28.618525 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f644a51-ebfb-406b-8303-9fe50eb5f53e" containerName="collect-profiles" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618532 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f644a51-ebfb-406b-8303-9fe50eb5f53e" containerName="collect-profiles" Feb 02 09:15:28 crc kubenswrapper[4747]: E0202 09:15:28.618546 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="rabbitmq" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618552 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="rabbitmq" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618702 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f644a51-ebfb-406b-8303-9fe50eb5f53e" containerName="collect-profiles" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.618712 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" containerName="rabbitmq" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.619691 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.619791 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647185 4747 scope.go:117] "RemoveContainer" containerID="b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647565 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647576 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b792z" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647712 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647782 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.647915 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 09:15:28 crc kubenswrapper[4747]: E0202 09:15:28.648068 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130\": container with ID starting with b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130 not found: ID does not exist" containerID="b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.648100 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130"} err="failed to get container status \"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130\": rpc error: code = NotFound desc = could not find container \"b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130\": container with ID starting with b6dee8b11d5b23f9945c2dbbd26325998a33f886b82b9e1548027e580379d130 not found: ID does not exist" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.648127 4747 scope.go:117] "RemoveContainer" containerID="8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.648135 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 09:15:28 crc kubenswrapper[4747]: E0202 09:15:28.648525 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085\": container with ID starting with 8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085 not found: ID does not exist" containerID="8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.648551 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085"} err="failed to get container status \"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085\": rpc error: code = NotFound desc = could not find container \"8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085\": container with ID starting with 8dda65f0cfc52219954841248b3c391ae09fa326b38b78cd33b20d70c2775085 not found: ID does not exist" Feb 02 
09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.650133 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671501 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671645 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l7bf\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-kube-api-access-7l7bf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671731 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671771 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671812 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671835 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" 
(UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671886 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.671904 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.773636 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774012 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774156 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774390 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l7bf\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-kube-api-access-7l7bf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774522 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774667 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774778 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.775046 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.775151 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774963 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.774215 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.775456 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.775597 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.776731 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.778467 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: 
I0202 09:15:28.778678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.778757 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.779167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.779203 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.791549 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l7bf\" (UniqueName: \"kubernetes.io/projected/ff8e9063-bf18-45e0-92ef-81bc7eee9d50-kube-api-access-7l7bf\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.815532 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"ff8e9063-bf18-45e0-92ef-81bc7eee9d50\") " pod="openstack/rabbitmq-server-0" Feb 02 09:15:28 crc kubenswrapper[4747]: I0202 09:15:28.969310 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 09:15:29 crc kubenswrapper[4747]: W0202 09:15:29.420677 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff8e9063_bf18_45e0_92ef_81bc7eee9d50.slice/crio-770a46a5170340da8117189de893500ad7700af45e021155b0e7696cda5e4330 WatchSource:0}: Error finding container 770a46a5170340da8117189de893500ad7700af45e021155b0e7696cda5e4330: Status 404 returned error can't find the container with id 770a46a5170340da8117189de893500ad7700af45e021155b0e7696cda5e4330 Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.424834 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.542967 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff8e9063-bf18-45e0-92ef-81bc7eee9d50","Type":"ContainerStarted","Data":"770a46a5170340da8117189de893500ad7700af45e021155b0e7696cda5e4330"} Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.643658 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.645885 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.648387 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.657064 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693151 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693491 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ds96\" (UniqueName: \"kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693628 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693768 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693828 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.693880 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.694017 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796048 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796114 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796141 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796166 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796195 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796232 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ds96\" (UniqueName: \"kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.796289 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.797245 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.798441 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.798970 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " 
pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.799470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.799737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.805248 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.820844 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ds96\" (UniqueName: \"kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96\") pod \"dnsmasq-dns-67b789f86c-qzj79\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:29 crc kubenswrapper[4747]: I0202 09:15:29.969062 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.144172 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306381 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306451 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306504 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306543 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306572 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306632 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306671 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306695 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306749 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.306771 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-598t6\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: 
\"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.307035 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls\") pod \"c9627032-9b68-4e48-8372-fabd9de3d289\" (UID: \"c9627032-9b68-4e48-8372-fabd9de3d289\") " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.308402 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.308657 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.308868 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.350957 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94de1c50-3e73-4d3c-9f71-194b0fd4f005" path="/var/lib/kubelet/pods/94de1c50-3e73-4d3c-9f71-194b0fd4f005/volumes" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.393978 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info" (OuterVolumeSpecName: "pod-info") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.394087 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.394105 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.394226 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.395452 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6" (OuterVolumeSpecName: "kube-api-access-598t6") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "kube-api-access-598t6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408803 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408832 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408844 4747 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408871 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408879 4747 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9627032-9b68-4e48-8372-fabd9de3d289-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408888 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408895 4747 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9627032-9b68-4e48-8372-fabd9de3d289-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.408904 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-598t6\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-kube-api-access-598t6\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.432432 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.511050 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") 
on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.553587 4747 generic.go:334] "Generic (PLEG): container finished" podID="c9627032-9b68-4e48-8372-fabd9de3d289" containerID="230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33" exitCode=0 Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.553635 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerDied","Data":"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33"} Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.553659 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.553676 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c9627032-9b68-4e48-8372-fabd9de3d289","Type":"ContainerDied","Data":"9437999481dc46a111427999ffd79d7ee534b6df3b68cb43b511375e71f9caf1"} Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.553696 4747 scope.go:117] "RemoveContainer" containerID="230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.810096 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data" (OuterVolumeSpecName: "config-data") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.818453 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.870863 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf" (OuterVolumeSpecName: "server-conf") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.920685 4747 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9627032-9b68-4e48-8372-fabd9de3d289-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:30 crc kubenswrapper[4747]: I0202 09:15:30.968566 4747 scope.go:117] "RemoveContainer" containerID="2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.018666 4747 scope.go:117] "RemoveContainer" containerID="230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33" Feb 02 09:15:31 crc kubenswrapper[4747]: E0202 09:15:31.019418 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33\": container with ID starting with 230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33 not found: ID does not exist" containerID="230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.019449 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33"} err="failed to get container status \"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33\": rpc error: code = NotFound desc = could not find container \"230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33\": container with ID starting with 230d576e2d84d66dc15df91c49906092c221fc880be5d719518ca166cb43de33 not found: ID does not exist" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.019469 4747 scope.go:117] "RemoveContainer" containerID="2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40" Feb 02 09:15:31 crc kubenswrapper[4747]: E0202 09:15:31.020058 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40\": container with ID starting with 2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40 not found: ID does not exist" containerID="2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.020081 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40"} err="failed to get container status \"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40\": rpc error: code = NotFound desc = could not find container \"2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40\": container with ID starting with 2682fbdbca93f9f33e69d3561a9e35398630e02daf5e02906ab9b5637e110d40 not found: ID does not exist" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.031488 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c9627032-9b68-4e48-8372-fabd9de3d289" (UID: "c9627032-9b68-4e48-8372-fabd9de3d289"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.123636 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9627032-9b68-4e48-8372-fabd9de3d289-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.135704 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.191368 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.202916 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.219736 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:31 crc kubenswrapper[4747]: E0202 09:15:31.221556 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="setup-container" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.221596 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="setup-container" Feb 02 09:15:31 crc kubenswrapper[4747]: E0202 09:15:31.221625 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="rabbitmq" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.221633 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="rabbitmq" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.221860 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" containerName="rabbitmq" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.223193 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.229639 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.229927 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8bnbj" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.230046 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.230214 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.230293 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.230337 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.230456 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.254511 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326445 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7eee7ec2-61e8-40b8-86c6-618d811a6b58-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326640 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kxzq\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-kube-api-access-7kxzq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326698 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326719 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326744 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326836 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.326978 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7eee7ec2-61e8-40b8-86c6-618d811a6b58-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.428410 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7eee7ec2-61e8-40b8-86c6-618d811a6b58-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.428694 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.428797 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7eee7ec2-61e8-40b8-86c6-618d811a6b58-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.428904 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429146 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kxzq\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-kube-api-access-7kxzq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429216 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429273 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429342 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429446 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429569 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429596 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-plugins-conf\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429274 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.429362 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.430658 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.434132 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.434520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7eee7ec2-61e8-40b8-86c6-618d811a6b58-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.434946 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.435400 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7eee7ec2-61e8-40b8-86c6-618d811a6b58-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.441385 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7eee7ec2-61e8-40b8-86c6-618d811a6b58-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.446308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kxzq\" (UniqueName: \"kubernetes.io/projected/7eee7ec2-61e8-40b8-86c6-618d811a6b58-kube-api-access-7kxzq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.461122 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7eee7ec2-61e8-40b8-86c6-618d811a6b58\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.558729 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.567793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff8e9063-bf18-45e0-92ef-81bc7eee9d50","Type":"ContainerStarted","Data":"388554bfde294bb65a45b65ee38fe8238f1b87fe1ee68b1173bf6aea7d39b0d1"} Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.570450 4747 generic.go:334] "Generic (PLEG): container finished" podID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerID="0b4a1708f84828ec14d7bf6f0a2e24eaa1e610541502f1dd58085771b95ed0ed" exitCode=0 Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.570497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" event={"ID":"65851c49-99be-44af-a0c7-c2d851fb56d7","Type":"ContainerDied","Data":"0b4a1708f84828ec14d7bf6f0a2e24eaa1e610541502f1dd58085771b95ed0ed"} Feb 02 09:15:31 crc kubenswrapper[4747]: I0202 09:15:31.570525 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" event={"ID":"65851c49-99be-44af-a0c7-c2d851fb56d7","Type":"ContainerStarted","Data":"62ee40eb99a9ed975bfd8f4aed14b9b70c1931ac5d4f77c3b384c658da40a028"} Feb 02 09:15:32 crc kubenswrapper[4747]: I0202 09:15:32.001643 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 09:15:32 crc kubenswrapper[4747]: I0202 09:15:32.352609 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9627032-9b68-4e48-8372-fabd9de3d289" path="/var/lib/kubelet/pods/c9627032-9b68-4e48-8372-fabd9de3d289/volumes" Feb 02 09:15:32 crc kubenswrapper[4747]: I0202 09:15:32.588906 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7eee7ec2-61e8-40b8-86c6-618d811a6b58","Type":"ContainerStarted","Data":"70ef571c647bcc9b146c6c84c3a9898ee958099553a72f9e8009249b8ba033cf"} Feb 02 09:15:32 crc kubenswrapper[4747]: I0202 09:15:32.592594 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" event={"ID":"65851c49-99be-44af-a0c7-c2d851fb56d7","Type":"ContainerStarted","Data":"130784e41bceb93ff5f571442a75fc6b621292fdef64a86db9d580d1d87d8678"} Feb 02 09:15:32 crc kubenswrapper[4747]: I0202 09:15:32.630576 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" podStartSLOduration=3.63055471 podStartE2EDuration="3.63055471s" podCreationTimestamp="2026-02-02 09:15:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:15:32.620776069 +0000 UTC m=+1145.165114552" watchObservedRunningTime="2026-02-02 09:15:32.63055471 +0000 UTC m=+1145.174893153" Feb 02 09:15:33 crc kubenswrapper[4747]: I0202 09:15:33.599809 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:34 crc kubenswrapper[4747]: I0202 09:15:34.619968 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"7eee7ec2-61e8-40b8-86c6-618d811a6b58","Type":"ContainerStarted","Data":"46269bde7bb1fafb7264ffb231ab2d086f2c7460fe84156dd80718da7f0ab521"} Feb 02 09:15:39 crc kubenswrapper[4747]: I0202 09:15:39.971213 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.038994 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.039291 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="dnsmasq-dns" containerID="cri-o://b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce" gracePeriod=10 Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.192753 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-qdz6h"] Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.207543 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.208660 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-qdz6h"] Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314489 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-config\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314626 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314683 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzpfb\" (UniqueName: \"kubernetes.io/projected/98cae643-025f-440f-8c6b-84d9589ab11c-kube-api-access-vzpfb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.314963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.315002 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.416553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzpfb\" (UniqueName: \"kubernetes.io/projected/98cae643-025f-440f-8c6b-84d9589ab11c-kube-api-access-vzpfb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.416915 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.416964 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.416999 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.417030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-config\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.417102 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.417128 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.417853 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.417885 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-config\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.418477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.418879 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.419032 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.419923 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/98cae643-025f-440f-8c6b-84d9589ab11c-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.436845 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzpfb\" (UniqueName: \"kubernetes.io/projected/98cae643-025f-440f-8c6b-84d9589ab11c-kube-api-access-vzpfb\") pod \"dnsmasq-dns-cb6ffcf87-qdz6h\" (UID: \"98cae643-025f-440f-8c6b-84d9589ab11c\") " pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.532052 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.540538 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622407 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7grpj\" (UniqueName: \"kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622461 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622660 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.622691 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0\") pod \"2fea7ef4-c513-4ded-93f4-689653dc8990\" (UID: \"2fea7ef4-c513-4ded-93f4-689653dc8990\") " Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.642489 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj" (OuterVolumeSpecName: "kube-api-access-7grpj") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "kube-api-access-7grpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.677440 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.689681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config" (OuterVolumeSpecName: "config") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694270 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694508 4747 generic.go:334] "Generic (PLEG): container finished" podID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerID="b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce" exitCode=0 Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694557 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" event={"ID":"2fea7ef4-c513-4ded-93f4-689653dc8990","Type":"ContainerDied","Data":"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce"} Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694619 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" event={"ID":"2fea7ef4-c513-4ded-93f4-689653dc8990","Type":"ContainerDied","Data":"1873795ebd5197cf0282511b55c0ac7e618928f103ae556a74621a4cb6502432"} Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694639 4747 scope.go:117] "RemoveContainer" containerID="b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.694817 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-w57fn" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.700568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.725073 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7grpj\" (UniqueName: \"kubernetes.io/projected/2fea7ef4-c513-4ded-93f4-689653dc8990-kube-api-access-7grpj\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.725110 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.725121 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.725132 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.725143 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.729980 4747 scope.go:117] "RemoveContainer" containerID="cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.737111 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2fea7ef4-c513-4ded-93f4-689653dc8990" (UID: "2fea7ef4-c513-4ded-93f4-689653dc8990"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.754340 4747 scope.go:117] "RemoveContainer" containerID="b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce" Feb 02 09:15:40 crc kubenswrapper[4747]: E0202 09:15:40.754825 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce\": container with ID starting with b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce not found: ID does not exist" containerID="b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.754962 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce"} err="failed to get container status \"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce\": rpc error: code = NotFound desc = could not find container \"b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce\": container with ID starting with b3c101c33aa30bdb64aa1145f7af8c16c51b6e050d1ff1c9ac87cc997c8053ce not found: ID does not exist" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.755073 4747 scope.go:117] "RemoveContainer" containerID="cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a" Feb 02 09:15:40 crc kubenswrapper[4747]: E0202 09:15:40.755814 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a\": container with ID starting with cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a not found: ID does not exist" containerID="cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.755865 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a"} err="failed to get container status \"cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a\": rpc error: code = NotFound desc = could not find container \"cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a\": container with ID starting with cbde4735805bea8249fcbffecaeaa9b02d249fbe4153226d3e6eea75b108155a not found: ID does not exist" Feb 02 09:15:40 crc kubenswrapper[4747]: I0202 09:15:40.826619 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fea7ef4-c513-4ded-93f4-689653dc8990-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 09:15:41.027430 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-qdz6h"] Feb 02 09:15:41 crc kubenswrapper[4747]: W0202 09:15:41.031991 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98cae643_025f_440f_8c6b_84d9589ab11c.slice/crio-a59cf9d9ebc1a44c6f9d11aedb85cfb8969859b5dcb57086c16357e87f3bd266 WatchSource:0}: Error finding container a59cf9d9ebc1a44c6f9d11aedb85cfb8969859b5dcb57086c16357e87f3bd266: Status 404 returned error can't find the container with id a59cf9d9ebc1a44c6f9d11aedb85cfb8969859b5dcb57086c16357e87f3bd266 Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 
09:15:41.039300 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 09:15:41.048625 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-w57fn"] Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 09:15:41.710573 4747 generic.go:334] "Generic (PLEG): container finished" podID="98cae643-025f-440f-8c6b-84d9589ab11c" containerID="3adddef886891fdaf56d8a10ae8fd68c4347b1194a23e252fc8f2764dacbf10c" exitCode=0 Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 09:15:41.710634 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" event={"ID":"98cae643-025f-440f-8c6b-84d9589ab11c","Type":"ContainerDied","Data":"3adddef886891fdaf56d8a10ae8fd68c4347b1194a23e252fc8f2764dacbf10c"} Feb 02 09:15:41 crc kubenswrapper[4747]: I0202 09:15:41.710679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" event={"ID":"98cae643-025f-440f-8c6b-84d9589ab11c","Type":"ContainerStarted","Data":"a59cf9d9ebc1a44c6f9d11aedb85cfb8969859b5dcb57086c16357e87f3bd266"} Feb 02 09:15:42 crc kubenswrapper[4747]: I0202 09:15:42.354260 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" path="/var/lib/kubelet/pods/2fea7ef4-c513-4ded-93f4-689653dc8990/volumes" Feb 02 09:15:42 crc kubenswrapper[4747]: I0202 09:15:42.722545 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" event={"ID":"98cae643-025f-440f-8c6b-84d9589ab11c","Type":"ContainerStarted","Data":"1e7fc4d44ad7d428e7ef5f9ece804c110aefe9d73add145d187c38bff941f7d7"} Feb 02 09:15:42 crc kubenswrapper[4747]: I0202 09:15:42.722825 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:42 crc kubenswrapper[4747]: I0202 09:15:42.746582 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" podStartSLOduration=2.74656607 podStartE2EDuration="2.74656607s" podCreationTimestamp="2026-02-02 09:15:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:15:42.738474493 +0000 UTC m=+1155.282812946" watchObservedRunningTime="2026-02-02 09:15:42.74656607 +0000 UTC m=+1155.290904503" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.518925 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.519675 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.519737 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.520716 4747 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.520814 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627" gracePeriod=600 Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.544370 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-qdz6h" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.609852 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.610145 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="dnsmasq-dns" containerID="cri-o://130784e41bceb93ff5f571442a75fc6b621292fdef64a86db9d580d1d87d8678" gracePeriod=10 Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.830073 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627" exitCode=0 Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.830440 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627"} Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.830478 4747 scope.go:117] "RemoveContainer" containerID="6ce984bd6987138797418ab2142f7c5fec0cece146a45e3138b5c727a63dd683" Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.865705 4747 generic.go:334] "Generic (PLEG): container finished" podID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerID="130784e41bceb93ff5f571442a75fc6b621292fdef64a86db9d580d1d87d8678" exitCode=0 Feb 02 09:15:50 crc kubenswrapper[4747]: I0202 09:15:50.865770 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" event={"ID":"65851c49-99be-44af-a0c7-c2d851fb56d7","Type":"ContainerDied","Data":"130784e41bceb93ff5f571442a75fc6b621292fdef64a86db9d580d1d87d8678"} Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.203632 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.340835 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341142 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341210 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341381 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ds96\" (UniqueName: \"kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341434 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.341460 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb\") pod \"65851c49-99be-44af-a0c7-c2d851fb56d7\" (UID: \"65851c49-99be-44af-a0c7-c2d851fb56d7\") " Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.346891 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96" (OuterVolumeSpecName: "kube-api-access-9ds96") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "kube-api-access-9ds96". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.396996 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.399289 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.404040 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.404486 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config" (OuterVolumeSpecName: "config") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.404483 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.418516 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "65851c49-99be-44af-a0c7-c2d851fb56d7" (UID: "65851c49-99be-44af-a0c7-c2d851fb56d7"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443520 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443574 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443587 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443601 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ds96\" (UniqueName: \"kubernetes.io/projected/65851c49-99be-44af-a0c7-c2d851fb56d7-kube-api-access-9ds96\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443617 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443628 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.443637 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65851c49-99be-44af-a0c7-c2d851fb56d7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.877555 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3"} Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.879628 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" event={"ID":"65851c49-99be-44af-a0c7-c2d851fb56d7","Type":"ContainerDied","Data":"62ee40eb99a9ed975bfd8f4aed14b9b70c1931ac5d4f77c3b384c658da40a028"} Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.879666 4747 scope.go:117] "RemoveContainer" containerID="130784e41bceb93ff5f571442a75fc6b621292fdef64a86db9d580d1d87d8678" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.879746 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-qzj79" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.902771 4747 scope.go:117] "RemoveContainer" containerID="0b4a1708f84828ec14d7bf6f0a2e24eaa1e610541502f1dd58085771b95ed0ed" Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.934132 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:51 crc kubenswrapper[4747]: I0202 09:15:51.950778 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-qzj79"] Feb 02 09:15:52 crc kubenswrapper[4747]: I0202 09:15:52.357474 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" path="/var/lib/kubelet/pods/65851c49-99be-44af-a0c7-c2d851fb56d7/volumes" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.513043 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr"] Feb 02 09:16:03 crc kubenswrapper[4747]: E0202 09:16:03.514181 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="init" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514202 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="init" Feb 02 09:16:03 crc kubenswrapper[4747]: E0202 09:16:03.514229 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514239 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: E0202 09:16:03.514259 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514268 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: E0202 09:16:03.514308 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="init" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514320 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="init" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514640 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fea7ef4-c513-4ded-93f4-689653dc8990" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.514673 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="65851c49-99be-44af-a0c7-c2d851fb56d7" containerName="dnsmasq-dns" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.515680 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.521293 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.521605 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.521764 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.527175 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.535516 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr"] Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.588991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xr8t\" (UniqueName: \"kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.589063 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.589294 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.589680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.692527 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.692634 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xr8t\" (UniqueName: 
\"kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.692666 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.692697 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.698164 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.698707 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.700923 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.710649 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xr8t\" (UniqueName: \"kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:03 crc kubenswrapper[4747]: I0202 09:16:03.852083 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:04 crc kubenswrapper[4747]: I0202 09:16:04.004754 4747 generic.go:334] "Generic (PLEG): container finished" podID="ff8e9063-bf18-45e0-92ef-81bc7eee9d50" containerID="388554bfde294bb65a45b65ee38fe8238f1b87fe1ee68b1173bf6aea7d39b0d1" exitCode=0 Feb 02 09:16:04 crc kubenswrapper[4747]: I0202 09:16:04.004817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff8e9063-bf18-45e0-92ef-81bc7eee9d50","Type":"ContainerDied","Data":"388554bfde294bb65a45b65ee38fe8238f1b87fe1ee68b1173bf6aea7d39b0d1"} Feb 02 09:16:04 crc kubenswrapper[4747]: I0202 09:16:04.428005 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr"] Feb 02 09:16:04 crc kubenswrapper[4747]: W0202 09:16:04.434861 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf572a805_a83f_44fa_a82c_dcbd3b154be6.slice/crio-b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4 WatchSource:0}: Error finding container b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4: Status 404 returned error can't find the container with id b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4 Feb 02 09:16:04 crc kubenswrapper[4747]: I0202 09:16:04.437229 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:16:05 crc kubenswrapper[4747]: I0202 09:16:05.021523 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" event={"ID":"f572a805-a83f-44fa-a82c-dcbd3b154be6","Type":"ContainerStarted","Data":"b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4"} Feb 02 09:16:05 crc kubenswrapper[4747]: I0202 09:16:05.024671 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff8e9063-bf18-45e0-92ef-81bc7eee9d50","Type":"ContainerStarted","Data":"c0ec10da2935f6c591c540258873f537bb0254482f83ec76b291ec322a5729f9"} Feb 02 09:16:05 crc kubenswrapper[4747]: I0202 09:16:05.025252 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 09:16:05 crc kubenswrapper[4747]: I0202 09:16:05.062443 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.062418504 podStartE2EDuration="37.062418504s" podCreationTimestamp="2026-02-02 09:15:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:16:05.052399167 +0000 UTC m=+1177.596737610" watchObservedRunningTime="2026-02-02 09:16:05.062418504 +0000 UTC m=+1177.606756937" Feb 02 09:16:07 crc kubenswrapper[4747]: I0202 09:16:07.050947 4747 generic.go:334] "Generic (PLEG): container finished" podID="7eee7ec2-61e8-40b8-86c6-618d811a6b58" containerID="46269bde7bb1fafb7264ffb231ab2d086f2c7460fe84156dd80718da7f0ab521" exitCode=0 Feb 02 09:16:07 crc kubenswrapper[4747]: I0202 09:16:07.051067 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7eee7ec2-61e8-40b8-86c6-618d811a6b58","Type":"ContainerDied","Data":"46269bde7bb1fafb7264ffb231ab2d086f2c7460fe84156dd80718da7f0ab521"} Feb 02 09:16:08 crc kubenswrapper[4747]: I0202 09:16:08.069857 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7eee7ec2-61e8-40b8-86c6-618d811a6b58","Type":"ContainerStarted","Data":"786a0f664192415b9671b4378255f9644b578309b64a99590c85048a61fcdc2e"} Feb 02 09:16:08 crc kubenswrapper[4747]: I0202 09:16:08.070330 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:16:08 crc kubenswrapper[4747]: I0202 09:16:08.102001 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.101980007 podStartE2EDuration="37.101980007s" podCreationTimestamp="2026-02-02 09:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:16:08.095423119 +0000 UTC m=+1180.639761572" watchObservedRunningTime="2026-02-02 09:16:08.101980007 +0000 UTC m=+1180.646318450" Feb 02 09:16:17 crc kubenswrapper[4747]: I0202 09:16:17.173744 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" event={"ID":"f572a805-a83f-44fa-a82c-dcbd3b154be6","Type":"ContainerStarted","Data":"2cbba35d11ab801d875f8c0f36923d4689933ff0660822c9fa069cb1ab1492a0"} Feb 02 09:16:17 crc kubenswrapper[4747]: I0202 09:16:17.198427 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" podStartSLOduration=2.369539484 podStartE2EDuration="14.198406136s" podCreationTimestamp="2026-02-02 09:16:03 +0000 UTC" firstStartedPulling="2026-02-02 09:16:04.437025348 +0000 UTC m=+1176.981363781" lastFinishedPulling="2026-02-02 09:16:16.265892 +0000 UTC m=+1188.810230433" observedRunningTime="2026-02-02 09:16:17.193633553 +0000 UTC m=+1189.737971996" watchObservedRunningTime="2026-02-02 09:16:17.198406136 +0000 UTC m=+1189.742744559" Feb 02 09:16:18 crc kubenswrapper[4747]: I0202 09:16:18.973191 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 09:16:21 crc kubenswrapper[4747]: I0202 09:16:21.563197 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 09:16:27 crc kubenswrapper[4747]: I0202 09:16:27.263743 4747 generic.go:334] "Generic (PLEG): container finished" podID="f572a805-a83f-44fa-a82c-dcbd3b154be6" containerID="2cbba35d11ab801d875f8c0f36923d4689933ff0660822c9fa069cb1ab1492a0" exitCode=0 Feb 02 09:16:27 crc kubenswrapper[4747]: I0202 09:16:27.264300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" event={"ID":"f572a805-a83f-44fa-a82c-dcbd3b154be6","Type":"ContainerDied","Data":"2cbba35d11ab801d875f8c0f36923d4689933ff0660822c9fa069cb1ab1492a0"} Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.855323 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.982887 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam\") pod \"f572a805-a83f-44fa-a82c-dcbd3b154be6\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.983373 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle\") pod \"f572a805-a83f-44fa-a82c-dcbd3b154be6\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.983566 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory\") pod \"f572a805-a83f-44fa-a82c-dcbd3b154be6\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.983714 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xr8t\" (UniqueName: \"kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t\") pod \"f572a805-a83f-44fa-a82c-dcbd3b154be6\" (UID: \"f572a805-a83f-44fa-a82c-dcbd3b154be6\") " Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.988518 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f572a805-a83f-44fa-a82c-dcbd3b154be6" (UID: "f572a805-a83f-44fa-a82c-dcbd3b154be6"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:16:28 crc kubenswrapper[4747]: I0202 09:16:28.991078 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t" (OuterVolumeSpecName: "kube-api-access-5xr8t") pod "f572a805-a83f-44fa-a82c-dcbd3b154be6" (UID: "f572a805-a83f-44fa-a82c-dcbd3b154be6"). InnerVolumeSpecName "kube-api-access-5xr8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.014327 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory" (OuterVolumeSpecName: "inventory") pod "f572a805-a83f-44fa-a82c-dcbd3b154be6" (UID: "f572a805-a83f-44fa-a82c-dcbd3b154be6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.014912 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f572a805-a83f-44fa-a82c-dcbd3b154be6" (UID: "f572a805-a83f-44fa-a82c-dcbd3b154be6"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.085459 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.085506 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xr8t\" (UniqueName: \"kubernetes.io/projected/f572a805-a83f-44fa-a82c-dcbd3b154be6-kube-api-access-5xr8t\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.085521 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.085535 4747 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f572a805-a83f-44fa-a82c-dcbd3b154be6-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.281212 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" event={"ID":"f572a805-a83f-44fa-a82c-dcbd3b154be6","Type":"ContainerDied","Data":"b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4"} Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.281464 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7f6c70f6734af0a9621d354b0e23bea02cd8e5fa5244bd766f3ea62a94341f4" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.281266 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.369889 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr"] Feb 02 09:16:29 crc kubenswrapper[4747]: E0202 09:16:29.370358 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f572a805-a83f-44fa-a82c-dcbd3b154be6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.370377 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f572a805-a83f-44fa-a82c-dcbd3b154be6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.370607 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f572a805-a83f-44fa-a82c-dcbd3b154be6" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.371387 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.373722 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.374244 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.374521 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.374525 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.392652 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr"] Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.493184 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z4z2\" (UniqueName: \"kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.493349 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.493506 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.595361 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.595465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.595542 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z4z2\" (UniqueName: \"kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.602645 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.603782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.620460 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z4z2\" (UniqueName: \"kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-jwfdr\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:29 crc kubenswrapper[4747]: I0202 09:16:29.692982 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:30 crc kubenswrapper[4747]: I0202 09:16:30.221995 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr"] Feb 02 09:16:30 crc kubenswrapper[4747]: I0202 09:16:30.290551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" event={"ID":"cde86100-fdda-4f1b-b549-b87b483e3859","Type":"ContainerStarted","Data":"edd105893eab4085a0a3320cab9294e5cb7daa115cb28d32f8e6d3fd4f93e589"} Feb 02 09:16:31 crc kubenswrapper[4747]: I0202 09:16:31.302206 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" event={"ID":"cde86100-fdda-4f1b-b549-b87b483e3859","Type":"ContainerStarted","Data":"2947e2c711a6a85bca8f654f8ddfcdb107960338556f8c445dafdff7fef2d199"} Feb 02 09:16:31 crc kubenswrapper[4747]: I0202 09:16:31.325480 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" podStartSLOduration=1.903837925 podStartE2EDuration="2.325456016s" podCreationTimestamp="2026-02-02 09:16:29 +0000 UTC" firstStartedPulling="2026-02-02 09:16:30.221506196 +0000 UTC m=+1202.765844629" lastFinishedPulling="2026-02-02 09:16:30.643124267 +0000 UTC m=+1203.187462720" observedRunningTime="2026-02-02 09:16:31.322285564 +0000 UTC m=+1203.866623997" watchObservedRunningTime="2026-02-02 09:16:31.325456016 +0000 UTC m=+1203.869794449" Feb 02 09:16:33 crc kubenswrapper[4747]: I0202 09:16:33.325681 4747 generic.go:334] "Generic (PLEG): container finished" podID="cde86100-fdda-4f1b-b549-b87b483e3859" containerID="2947e2c711a6a85bca8f654f8ddfcdb107960338556f8c445dafdff7fef2d199" exitCode=0 Feb 02 09:16:33 crc kubenswrapper[4747]: I0202 09:16:33.325757 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" event={"ID":"cde86100-fdda-4f1b-b549-b87b483e3859","Type":"ContainerDied","Data":"2947e2c711a6a85bca8f654f8ddfcdb107960338556f8c445dafdff7fef2d199"} Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.790872 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.900079 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory\") pod \"cde86100-fdda-4f1b-b549-b87b483e3859\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.900187 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z4z2\" (UniqueName: \"kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2\") pod \"cde86100-fdda-4f1b-b549-b87b483e3859\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.900285 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam\") pod \"cde86100-fdda-4f1b-b549-b87b483e3859\" (UID: \"cde86100-fdda-4f1b-b549-b87b483e3859\") " Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.905049 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2" (OuterVolumeSpecName: "kube-api-access-8z4z2") pod "cde86100-fdda-4f1b-b549-b87b483e3859" (UID: "cde86100-fdda-4f1b-b549-b87b483e3859"). InnerVolumeSpecName "kube-api-access-8z4z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.927558 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "cde86100-fdda-4f1b-b549-b87b483e3859" (UID: "cde86100-fdda-4f1b-b549-b87b483e3859"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:16:34 crc kubenswrapper[4747]: I0202 09:16:34.939561 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory" (OuterVolumeSpecName: "inventory") pod "cde86100-fdda-4f1b-b549-b87b483e3859" (UID: "cde86100-fdda-4f1b-b549-b87b483e3859"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.002708 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.002749 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cde86100-fdda-4f1b-b549-b87b483e3859-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.002762 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z4z2\" (UniqueName: \"kubernetes.io/projected/cde86100-fdda-4f1b-b549-b87b483e3859-kube-api-access-8z4z2\") on node \"crc\" DevicePath \"\"" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.353679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" event={"ID":"cde86100-fdda-4f1b-b549-b87b483e3859","Type":"ContainerDied","Data":"edd105893eab4085a0a3320cab9294e5cb7daa115cb28d32f8e6d3fd4f93e589"} Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.354075 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edd105893eab4085a0a3320cab9294e5cb7daa115cb28d32f8e6d3fd4f93e589" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.353797 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-jwfdr" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.433021 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t"] Feb 02 09:16:35 crc kubenswrapper[4747]: E0202 09:16:35.434208 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cde86100-fdda-4f1b-b549-b87b483e3859" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.434320 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cde86100-fdda-4f1b-b549-b87b483e3859" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.434826 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cde86100-fdda-4f1b-b549-b87b483e3859" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.436063 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.437855 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.438448 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.438794 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.439079 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.443841 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t"] Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.521184 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.521470 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.521596 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.521886 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk5s2\" (UniqueName: \"kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.623880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.623973 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.624041 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.624118 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk5s2\" (UniqueName: \"kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.633245 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.634076 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.636041 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.641211 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk5s2\" (UniqueName: \"kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:35 crc kubenswrapper[4747]: I0202 09:16:35.796321 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:16:36 crc kubenswrapper[4747]: I0202 09:16:36.330645 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t"] Feb 02 09:16:36 crc kubenswrapper[4747]: I0202 09:16:36.365798 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" event={"ID":"5f3a380a-d55f-4522-a962-4003519edb27","Type":"ContainerStarted","Data":"480e7069a599ecb33480e7b40eab6fa5d3e870b70eaea1747192e26cabc4777e"} Feb 02 09:16:37 crc kubenswrapper[4747]: I0202 09:16:37.375883 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" event={"ID":"5f3a380a-d55f-4522-a962-4003519edb27","Type":"ContainerStarted","Data":"67ddb01865ad85ac9d48791e4e2f7c4780a7c53906f989c1996191564c75576f"} Feb 02 09:16:37 crc kubenswrapper[4747]: I0202 09:16:37.397564 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" podStartSLOduration=2.006697721 podStartE2EDuration="2.397547531s" podCreationTimestamp="2026-02-02 09:16:35 +0000 UTC" firstStartedPulling="2026-02-02 09:16:36.337016347 +0000 UTC m=+1208.881354780" lastFinishedPulling="2026-02-02 09:16:36.727866157 +0000 UTC m=+1209.272204590" observedRunningTime="2026-02-02 09:16:37.392556433 +0000 UTC m=+1209.936894866" watchObservedRunningTime="2026-02-02 09:16:37.397547531 +0000 UTC m=+1209.941885954" Feb 02 09:17:05 crc kubenswrapper[4747]: I0202 09:17:05.534603 4747 scope.go:117] "RemoveContainer" containerID="d27ad240800257295ba6b49f728a7dfec10b39ed80fb0d1377f15e9eff379b5e" Feb 02 09:18:05 crc kubenswrapper[4747]: I0202 09:18:05.595087 4747 scope.go:117] "RemoveContainer" containerID="2e6d43ec000cb195198b0d8a511ea959a31f954fd296ff8f6cbfc2eb5fec4755" Feb 02 09:18:05 crc kubenswrapper[4747]: I0202 09:18:05.640160 4747 scope.go:117] "RemoveContainer" containerID="79285de4a6d3b493c36dbf68549cb562a6eb49ec7d8c601bc5e254cf272f1d7b" Feb 02 09:18:20 crc kubenswrapper[4747]: I0202 09:18:20.519374 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:18:20 crc kubenswrapper[4747]: I0202 09:18:20.520259 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:18:50 crc kubenswrapper[4747]: I0202 09:18:50.518837 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:18:50 crc kubenswrapper[4747]: I0202 09:18:50.519515 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.518575 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.520909 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.521151 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.522294 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.522582 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3" gracePeriod=600 Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.891399 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3" exitCode=0 Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.891484 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3"} Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.891886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5"} Feb 02 09:19:20 crc kubenswrapper[4747]: I0202 09:19:20.891910 4747 scope.go:117] "RemoveContainer" containerID="cfcdbd71d081b839dee0ed836834b42e47c3f661c3c5fc464d12e55a08f08627" Feb 02 09:19:22 crc kubenswrapper[4747]: I0202 09:19:22.913250 4747 generic.go:334] "Generic (PLEG): container finished" podID="5f3a380a-d55f-4522-a962-4003519edb27" containerID="67ddb01865ad85ac9d48791e4e2f7c4780a7c53906f989c1996191564c75576f" exitCode=0 Feb 02 09:19:22 crc kubenswrapper[4747]: I0202 09:19:22.913377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" 
event={"ID":"5f3a380a-d55f-4522-a962-4003519edb27","Type":"ContainerDied","Data":"67ddb01865ad85ac9d48791e4e2f7c4780a7c53906f989c1996191564c75576f"} Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.346893 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.527186 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle\") pod \"5f3a380a-d55f-4522-a962-4003519edb27\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.527359 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam\") pod \"5f3a380a-d55f-4522-a962-4003519edb27\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.527459 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk5s2\" (UniqueName: \"kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2\") pod \"5f3a380a-d55f-4522-a962-4003519edb27\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.527489 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory\") pod \"5f3a380a-d55f-4522-a962-4003519edb27\" (UID: \"5f3a380a-d55f-4522-a962-4003519edb27\") " Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.534290 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5f3a380a-d55f-4522-a962-4003519edb27" (UID: "5f3a380a-d55f-4522-a962-4003519edb27"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.540302 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2" (OuterVolumeSpecName: "kube-api-access-gk5s2") pod "5f3a380a-d55f-4522-a962-4003519edb27" (UID: "5f3a380a-d55f-4522-a962-4003519edb27"). InnerVolumeSpecName "kube-api-access-gk5s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.569185 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory" (OuterVolumeSpecName: "inventory") pod "5f3a380a-d55f-4522-a962-4003519edb27" (UID: "5f3a380a-d55f-4522-a962-4003519edb27"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.574707 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5f3a380a-d55f-4522-a962-4003519edb27" (UID: "5f3a380a-d55f-4522-a962-4003519edb27"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.630476 4747 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.630517 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.630531 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk5s2\" (UniqueName: \"kubernetes.io/projected/5f3a380a-d55f-4522-a962-4003519edb27-kube-api-access-gk5s2\") on node \"crc\" DevicePath \"\"" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.630542 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f3a380a-d55f-4522-a962-4003519edb27-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.936058 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" event={"ID":"5f3a380a-d55f-4522-a962-4003519edb27","Type":"ContainerDied","Data":"480e7069a599ecb33480e7b40eab6fa5d3e870b70eaea1747192e26cabc4777e"} Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.936106 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="480e7069a599ecb33480e7b40eab6fa5d3e870b70eaea1747192e26cabc4777e" Feb 02 09:19:24 crc kubenswrapper[4747]: I0202 09:19:24.936159 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.030175 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j"] Feb 02 09:19:25 crc kubenswrapper[4747]: E0202 09:19:25.030557 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f3a380a-d55f-4522-a962-4003519edb27" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.030575 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f3a380a-d55f-4522-a962-4003519edb27" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.030764 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f3a380a-d55f-4522-a962-4003519edb27" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.031418 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.033883 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.034265 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.034341 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.034663 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.048504 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j"] Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.139179 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2rxn\" (UniqueName: \"kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.139395 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.139451 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.241520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2rxn\" (UniqueName: \"kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.241732 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.241818 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.248351 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.248513 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.260323 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2rxn\" (UniqueName: \"kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tql4j\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.355890 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.872023 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j"] Feb 02 09:19:25 crc kubenswrapper[4747]: W0202 09:19:25.874323 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb18e7fd9_2c50_4d27_b7ab_c525aa31a768.slice/crio-622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf WatchSource:0}: Error finding container 622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf: Status 404 returned error can't find the container with id 622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf Feb 02 09:19:25 crc kubenswrapper[4747]: I0202 09:19:25.946754 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" event={"ID":"b18e7fd9-2c50-4d27-b7ab-c525aa31a768","Type":"ContainerStarted","Data":"622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf"} Feb 02 09:19:26 crc kubenswrapper[4747]: I0202 09:19:26.957850 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" event={"ID":"b18e7fd9-2c50-4d27-b7ab-c525aa31a768","Type":"ContainerStarted","Data":"73149389a86f6ae88f3c9881ca531dfc12200f5bb6c23085894a195743c6f12a"} Feb 02 09:19:26 crc kubenswrapper[4747]: I0202 09:19:26.987415 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" podStartSLOduration=1.386222861 podStartE2EDuration="1.987389154s" podCreationTimestamp="2026-02-02 
09:19:25 +0000 UTC" firstStartedPulling="2026-02-02 09:19:25.877071818 +0000 UTC m=+1378.421410261" lastFinishedPulling="2026-02-02 09:19:26.478238121 +0000 UTC m=+1379.022576554" observedRunningTime="2026-02-02 09:19:26.973855603 +0000 UTC m=+1379.518194076" watchObservedRunningTime="2026-02-02 09:19:26.987389154 +0000 UTC m=+1379.531727587" Feb 02 09:20:05 crc kubenswrapper[4747]: I0202 09:20:05.748719 4747 scope.go:117] "RemoveContainer" containerID="5fa6f58e477044f50a64a1e70f639861cd2a105fa9fede5a8869f21882226100" Feb 02 09:20:05 crc kubenswrapper[4747]: I0202 09:20:05.787189 4747 scope.go:117] "RemoveContainer" containerID="680fc39bdc18300762da33ddc18b525ece6d207ed4ea281091cc9ae502680fbe" Feb 02 09:20:05 crc kubenswrapper[4747]: I0202 09:20:05.846135 4747 scope.go:117] "RemoveContainer" containerID="815c772183b22ce0317b076c24ed835a1dc87da689f83d13ab6bb2efa2c3f06a" Feb 02 09:20:48 crc kubenswrapper[4747]: I0202 09:20:48.729800 4747 generic.go:334] "Generic (PLEG): container finished" podID="b18e7fd9-2c50-4d27-b7ab-c525aa31a768" containerID="73149389a86f6ae88f3c9881ca531dfc12200f5bb6c23085894a195743c6f12a" exitCode=0 Feb 02 09:20:48 crc kubenswrapper[4747]: I0202 09:20:48.730002 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" event={"ID":"b18e7fd9-2c50-4d27-b7ab-c525aa31a768","Type":"ContainerDied","Data":"73149389a86f6ae88f3c9881ca531dfc12200f5bb6c23085894a195743c6f12a"} Feb 02 09:20:49 crc kubenswrapper[4747]: I0202 09:20:49.051614 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-f601-account-create-update-98lbw"] Feb 02 09:20:49 crc kubenswrapper[4747]: I0202 09:20:49.063626 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-d8hcv"] Feb 02 09:20:49 crc kubenswrapper[4747]: I0202 09:20:49.075167 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-f601-account-create-update-98lbw"] Feb 02 09:20:49 crc kubenswrapper[4747]: I0202 09:20:49.086841 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-d8hcv"] Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.160719 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.214493 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam\") pod \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.214576 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory\") pod \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.214771 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2rxn\" (UniqueName: \"kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn\") pod \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\" (UID: \"b18e7fd9-2c50-4d27-b7ab-c525aa31a768\") " Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.245347 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn" (OuterVolumeSpecName: "kube-api-access-g2rxn") pod "b18e7fd9-2c50-4d27-b7ab-c525aa31a768" (UID: "b18e7fd9-2c50-4d27-b7ab-c525aa31a768"). InnerVolumeSpecName "kube-api-access-g2rxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.290187 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory" (OuterVolumeSpecName: "inventory") pod "b18e7fd9-2c50-4d27-b7ab-c525aa31a768" (UID: "b18e7fd9-2c50-4d27-b7ab-c525aa31a768"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.316438 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2rxn\" (UniqueName: \"kubernetes.io/projected/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-kube-api-access-g2rxn\") on node \"crc\" DevicePath \"\"" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.316470 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.403749 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b18e7fd9-2c50-4d27-b7ab-c525aa31a768" (UID: "b18e7fd9-2c50-4d27-b7ab-c525aa31a768"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.416474 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d1d042f-43a4-4ca2-9fbd-259c19c488bb" path="/var/lib/kubelet/pods/3d1d042f-43a4-4ca2-9fbd-259c19c488bb/volumes" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.417152 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="981047ad-5ae2-4e3a-ad38-ecfa32e93664" path="/var/lib/kubelet/pods/981047ad-5ae2-4e3a-ad38-ecfa32e93664/volumes" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.418464 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b18e7fd9-2c50-4d27-b7ab-c525aa31a768-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.779682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" event={"ID":"b18e7fd9-2c50-4d27-b7ab-c525aa31a768","Type":"ContainerDied","Data":"622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf"} Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.780105 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="622a5e1ec343d5afacc2bb6ecb97cc1feba9ed894d919fb3406f6080ee1f8adf" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.780200 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tql4j" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.843898 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2"] Feb 02 09:20:50 crc kubenswrapper[4747]: E0202 09:20:50.844451 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18e7fd9-2c50-4d27-b7ab-c525aa31a768" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.844478 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18e7fd9-2c50-4d27-b7ab-c525aa31a768" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.844705 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b18e7fd9-2c50-4d27-b7ab-c525aa31a768" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.845416 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.847451 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.848208 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.848661 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.849119 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.853697 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2"] Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.925405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74gkr\" (UniqueName: \"kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.925482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:50 crc kubenswrapper[4747]: I0202 09:20:50.925693 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.027334 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.027463 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74gkr\" (UniqueName: \"kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.027495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.031790 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.032051 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.043036 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74gkr\" (UniqueName: \"kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-phpk2\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.172773 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.732730 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2"] Feb 02 09:20:51 crc kubenswrapper[4747]: I0202 09:20:51.789911 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" event={"ID":"b9b9686a-9848-43fb-90fd-2986e723d282","Type":"ContainerStarted","Data":"0c597a8460a803896e9fa13cf6cf1d4a23346ef2bdc3423738909cce7bf321d0"} Feb 02 09:20:52 crc kubenswrapper[4747]: I0202 09:20:52.801460 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" event={"ID":"b9b9686a-9848-43fb-90fd-2986e723d282","Type":"ContainerStarted","Data":"d005f2ac2c851ad9b247e22b9cd0ecf4dc5a5ffc6df7330a1a157a33a237ec66"} Feb 02 09:20:52 crc kubenswrapper[4747]: I0202 09:20:52.828836 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" podStartSLOduration=2.361645959 podStartE2EDuration="2.828813925s" podCreationTimestamp="2026-02-02 09:20:50 +0000 UTC" firstStartedPulling="2026-02-02 09:20:51.74117155 +0000 UTC m=+1464.285509973" lastFinishedPulling="2026-02-02 09:20:52.208339506 +0000 UTC m=+1464.752677939" observedRunningTime="2026-02-02 09:20:52.821194293 +0000 UTC m=+1465.365532736" watchObservedRunningTime="2026-02-02 09:20:52.828813925 +0000 UTC m=+1465.373152358" Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.028151 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-q8tlv"] Feb 
02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.039760 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-4qhvd"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.050016 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-3187-account-create-update-p79p5"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.058283 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-q8tlv"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.065207 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-4qhvd"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.071835 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-44ee-account-create-update-zqfb7"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.080611 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-3187-account-create-update-p79p5"] Feb 02 09:20:55 crc kubenswrapper[4747]: I0202 09:20:55.088123 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-44ee-account-create-update-zqfb7"] Feb 02 09:20:56 crc kubenswrapper[4747]: I0202 09:20:56.353672 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="099ed0a4-89ce-418e-88e5-e93b2831cf94" path="/var/lib/kubelet/pods/099ed0a4-89ce-418e-88e5-e93b2831cf94/volumes" Feb 02 09:20:56 crc kubenswrapper[4747]: I0202 09:20:56.354866 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="168afe57-1b0e-4ace-8565-c535f289fdfe" path="/var/lib/kubelet/pods/168afe57-1b0e-4ace-8565-c535f289fdfe/volumes" Feb 02 09:20:56 crc kubenswrapper[4747]: I0202 09:20:56.355768 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3684c04e-f715-4281-b323-5d0097ddded9" path="/var/lib/kubelet/pods/3684c04e-f715-4281-b323-5d0097ddded9/volumes" Feb 02 09:20:56 crc kubenswrapper[4747]: I0202 09:20:56.356664 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e16f96e6-5142-4edf-81bb-0e8eb79728b4" path="/var/lib/kubelet/pods/e16f96e6-5142-4edf-81bb-0e8eb79728b4/volumes" Feb 02 09:21:05 crc kubenswrapper[4747]: I0202 09:21:05.937385 4747 scope.go:117] "RemoveContainer" containerID="ea1c9906bef1bf8dc273d88b385e2a017547e031a15b9d6b4e2051a29ef0997e" Feb 02 09:21:05 crc kubenswrapper[4747]: I0202 09:21:05.959754 4747 scope.go:117] "RemoveContainer" containerID="91553e331e304db14bb31250696b4c88689b059278cfc2f7447c55d73d66936f" Feb 02 09:21:06 crc kubenswrapper[4747]: I0202 09:21:06.024576 4747 scope.go:117] "RemoveContainer" containerID="d72595de54c883632e35d328836897e3becab0dfb01220d4d609d9a29d04dcee" Feb 02 09:21:06 crc kubenswrapper[4747]: I0202 09:21:06.068605 4747 scope.go:117] "RemoveContainer" containerID="b77d14a5bf0f049adab94d00d0917dcae42b3c5107d5b662f166e4e007816623" Feb 02 09:21:06 crc kubenswrapper[4747]: I0202 09:21:06.119799 4747 scope.go:117] "RemoveContainer" containerID="d3fd3f7212c6604ac79285e6831b88f56e04ec79bab1c27c4699713c115b5a3d" Feb 02 09:21:06 crc kubenswrapper[4747]: I0202 09:21:06.168079 4747 scope.go:117] "RemoveContainer" containerID="e9651f3b3df2df63c74d2dded7b40633fa9db6d7f997db39001e13635b17845a" Feb 02 09:21:07 crc kubenswrapper[4747]: I0202 09:21:07.029205 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-pggdq"] Feb 02 09:21:07 crc kubenswrapper[4747]: I0202 09:21:07.040151 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/root-account-create-update-pggdq"] Feb 02 09:21:08 crc kubenswrapper[4747]: I0202 09:21:08.349678 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea9cec41-47c9-4a62-a268-aa2f8e9996b0" path="/var/lib/kubelet/pods/ea9cec41-47c9-4a62-a268-aa2f8e9996b0/volumes" Feb 02 09:21:12 crc kubenswrapper[4747]: I0202 09:21:12.050622 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-5xc45"] Feb 02 09:21:12 crc kubenswrapper[4747]: I0202 09:21:12.065862 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-5xc45"] Feb 02 09:21:12 crc kubenswrapper[4747]: I0202 09:21:12.353507 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885" path="/var/lib/kubelet/pods/d9a5900f-c2c6-4ec0-b0c0-3f3ef976a885/volumes" Feb 02 09:21:20 crc kubenswrapper[4747]: I0202 09:21:20.519082 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:21:20 crc kubenswrapper[4747]: I0202 09:21:20.520316 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.036599 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-f497-account-create-update-79mgp"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.045499 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-cd8b-account-create-update-4jdhl"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.053439 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-n9vgl"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.062230 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-bd6f7"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.070224 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-cbf0-account-create-update-bfwtn"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.078618 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-cd8b-account-create-update-4jdhl"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.087736 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-f497-account-create-update-79mgp"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.095539 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-bd6f7"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.104113 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-n9vgl"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.112669 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-cbf0-account-create-update-bfwtn"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.121149 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-vnmfj"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.129042 4747 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-vnmfj"] Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.349427 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38596e39-b23f-4670-8927-c8cab809a25b" path="/var/lib/kubelet/pods/38596e39-b23f-4670-8927-c8cab809a25b/volumes" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.350212 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="722ee392-b285-4b3f-9e61-034d352069a0" path="/var/lib/kubelet/pods/722ee392-b285-4b3f-9e61-034d352069a0/volumes" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.350785 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7652f270-f57b-4e46-9171-79dcaa5975e0" path="/var/lib/kubelet/pods/7652f270-f57b-4e46-9171-79dcaa5975e0/volumes" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.351420 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f2ed687-bc3f-4543-b5d5-6db15856198e" path="/var/lib/kubelet/pods/7f2ed687-bc3f-4543-b5d5-6db15856198e/volumes" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.352467 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb" path="/var/lib/kubelet/pods/dd21b5e7-cf8f-4398-8ef3-b5a3b1912ffb/volumes" Feb 02 09:21:24 crc kubenswrapper[4747]: I0202 09:21:24.353244 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5b470c-277c-413e-9377-a5cf2bfab33e" path="/var/lib/kubelet/pods/fe5b470c-277c-413e-9377-a5cf2bfab33e/volumes" Feb 02 09:21:28 crc kubenswrapper[4747]: I0202 09:21:28.028408 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-c8j7m"] Feb 02 09:21:28 crc kubenswrapper[4747]: I0202 09:21:28.036132 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-c8j7m"] Feb 02 09:21:28 crc kubenswrapper[4747]: I0202 09:21:28.350382 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a92c859d-6661-4ed3-888b-267a50ed2894" path="/var/lib/kubelet/pods/a92c859d-6661-4ed3-888b-267a50ed2894/volumes" Feb 02 09:21:50 crc kubenswrapper[4747]: I0202 09:21:50.518370 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:21:50 crc kubenswrapper[4747]: I0202 09:21:50.518921 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:22:00 crc kubenswrapper[4747]: I0202 09:22:00.386081 4747 generic.go:334] "Generic (PLEG): container finished" podID="b9b9686a-9848-43fb-90fd-2986e723d282" containerID="d005f2ac2c851ad9b247e22b9cd0ecf4dc5a5ffc6df7330a1a157a33a237ec66" exitCode=0 Feb 02 09:22:00 crc kubenswrapper[4747]: I0202 09:22:00.386177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" event={"ID":"b9b9686a-9848-43fb-90fd-2986e723d282","Type":"ContainerDied","Data":"d005f2ac2c851ad9b247e22b9cd0ecf4dc5a5ffc6df7330a1a157a33a237ec66"} Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.044203 4747 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-2vk6l"] Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.051395 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-2vk6l"] Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.785979 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.965805 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74gkr\" (UniqueName: \"kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr\") pod \"b9b9686a-9848-43fb-90fd-2986e723d282\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.965998 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam\") pod \"b9b9686a-9848-43fb-90fd-2986e723d282\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.966102 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory\") pod \"b9b9686a-9848-43fb-90fd-2986e723d282\" (UID: \"b9b9686a-9848-43fb-90fd-2986e723d282\") " Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.973025 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr" (OuterVolumeSpecName: "kube-api-access-74gkr") pod "b9b9686a-9848-43fb-90fd-2986e723d282" (UID: "b9b9686a-9848-43fb-90fd-2986e723d282"). InnerVolumeSpecName "kube-api-access-74gkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:01 crc kubenswrapper[4747]: I0202 09:22:01.995757 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b9b9686a-9848-43fb-90fd-2986e723d282" (UID: "b9b9686a-9848-43fb-90fd-2986e723d282"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.012813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory" (OuterVolumeSpecName: "inventory") pod "b9b9686a-9848-43fb-90fd-2986e723d282" (UID: "b9b9686a-9848-43fb-90fd-2986e723d282"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.068672 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74gkr\" (UniqueName: \"kubernetes.io/projected/b9b9686a-9848-43fb-90fd-2986e723d282-kube-api-access-74gkr\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.068711 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.068724 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9b9686a-9848-43fb-90fd-2986e723d282-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.352392 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1079c308-67ae-4be0-903d-49b2a0f0aa59" path="/var/lib/kubelet/pods/1079c308-67ae-4be0-903d-49b2a0f0aa59/volumes" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.407048 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" event={"ID":"b9b9686a-9848-43fb-90fd-2986e723d282","Type":"ContainerDied","Data":"0c597a8460a803896e9fa13cf6cf1d4a23346ef2bdc3423738909cce7bf321d0"} Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.407127 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c597a8460a803896e9fa13cf6cf1d4a23346ef2bdc3423738909cce7bf321d0" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.407129 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-phpk2" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.499351 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp"] Feb 02 09:22:02 crc kubenswrapper[4747]: E0202 09:22:02.499750 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b9686a-9848-43fb-90fd-2986e723d282" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.499776 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b9686a-9848-43fb-90fd-2986e723d282" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.499986 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9b9686a-9848-43fb-90fd-2986e723d282" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.500560 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.503255 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.503356 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.503618 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.503632 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.521442 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp"] Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.678888 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.679037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.679107 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdl2d\" (UniqueName: \"kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.781330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdl2d\" (UniqueName: \"kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.781501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.782048 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.798565 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.798583 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.805904 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdl2d\" (UniqueName: \"kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:02 crc kubenswrapper[4747]: I0202 09:22:02.866921 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:03 crc kubenswrapper[4747]: I0202 09:22:03.378486 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp"] Feb 02 09:22:03 crc kubenswrapper[4747]: I0202 09:22:03.383102 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:22:03 crc kubenswrapper[4747]: I0202 09:22:03.421981 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" event={"ID":"464e38ca-20bd-44ab-80de-f991f6bb7909","Type":"ContainerStarted","Data":"27ceca9e462a4c86dfd535ad917949cf218ac46f0f000395ed67626d7c6e2ef3"} Feb 02 09:22:04 crc kubenswrapper[4747]: I0202 09:22:04.434579 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" event={"ID":"464e38ca-20bd-44ab-80de-f991f6bb7909","Type":"ContainerStarted","Data":"fb27b39ec7de631d7dc48a809386c5c9390c7fa5891c582a1b3e7dc441173471"} Feb 02 09:22:04 crc kubenswrapper[4747]: I0202 09:22:04.453746 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" podStartSLOduration=1.73793872 podStartE2EDuration="2.453729455s" podCreationTimestamp="2026-02-02 09:22:02 +0000 UTC" firstStartedPulling="2026-02-02 09:22:03.382834303 +0000 UTC m=+1535.927172736" lastFinishedPulling="2026-02-02 09:22:04.098625038 +0000 UTC m=+1536.642963471" observedRunningTime="2026-02-02 09:22:04.449290972 +0000 UTC m=+1536.993629425" watchObservedRunningTime="2026-02-02 09:22:04.453729455 +0000 UTC m=+1536.998067888" Feb 02 09:22:05 crc 
kubenswrapper[4747]: I0202 09:22:05.052182 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.055566 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.059787 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.226228 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.226351 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ncq2\" (UniqueName: \"kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.226411 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.328026 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ncq2\" (UniqueName: \"kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.328114 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.328205 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.328681 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.328722 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.350472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ncq2\" (UniqueName: \"kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2\") pod \"redhat-marketplace-6jjbp\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.403764 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:05 crc kubenswrapper[4747]: I0202 09:22:05.859324 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:05 crc kubenswrapper[4747]: W0202 09:22:05.863106 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a0e940a_d4c5_4606_ad55_97c577a3465f.slice/crio-367fd1e023e25450be1d16e90b5ce8c6a3bf72e4fce64bc2d30c80b786e75800 WatchSource:0}: Error finding container 367fd1e023e25450be1d16e90b5ce8c6a3bf72e4fce64bc2d30c80b786e75800: Status 404 returned error can't find the container with id 367fd1e023e25450be1d16e90b5ce8c6a3bf72e4fce64bc2d30c80b786e75800 Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.297666 4747 scope.go:117] "RemoveContainer" containerID="ce909cc02cf3dca12228a169d06d003e73a26c3a53be7b9383808e44cd6d21ed" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.316873 4747 scope.go:117] "RemoveContainer" containerID="c7b4173ab517938f6b17ec879f15c659f321b2f0b309837e7eb03081af0a2eca" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.348802 4747 scope.go:117] "RemoveContainer" containerID="22329a8b2f1604e91d5059f78607d8b677dcd17343278988e9b33d80ab195ae1" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.427056 4747 scope.go:117] "RemoveContainer" containerID="5e44d2b3d6150edd516f232cd1e31d1d3dbde347305d39448096bffa8f04f938" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.454226 4747 scope.go:117] "RemoveContainer" containerID="9e763e05e46e884bed283e3000106dd02ac17c1ff377fa5da802188b4577539b" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.460161 4747 generic.go:334] "Generic (PLEG): container finished" podID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerID="b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f" exitCode=0 Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.460229 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerDied","Data":"b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f"} Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.460256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerStarted","Data":"367fd1e023e25450be1d16e90b5ce8c6a3bf72e4fce64bc2d30c80b786e75800"} Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.507019 4747 scope.go:117] "RemoveContainer" containerID="8a05e08a12d78c207b87be9ee1d57f882a54d380199c67f8e9987701ac95dc84" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 
09:22:06.543461 4747 scope.go:117] "RemoveContainer" containerID="e4f96e29b022f07f539adf785a45ac2cf8e6660578e30394841acdb41ca81d52" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.581990 4747 scope.go:117] "RemoveContainer" containerID="515c6e3d007f2d8d3b02472ebcc2f8d945b7fc435544430a8ea193ec7b7a90be" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.631356 4747 scope.go:117] "RemoveContainer" containerID="d83e892d38c647f9a5b0aa92a9384ff2abe4189ec3191e109473c1243bd446ac" Feb 02 09:22:06 crc kubenswrapper[4747]: I0202 09:22:06.667455 4747 scope.go:117] "RemoveContainer" containerID="53ee9304ef5e75d37af4c3a7daa7d928a76668469aac88ac95eb7762869364d6" Feb 02 09:22:07 crc kubenswrapper[4747]: I0202 09:22:07.474913 4747 generic.go:334] "Generic (PLEG): container finished" podID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerID="ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972" exitCode=0 Feb 02 09:22:07 crc kubenswrapper[4747]: I0202 09:22:07.474974 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerDied","Data":"ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972"} Feb 02 09:22:08 crc kubenswrapper[4747]: I0202 09:22:08.485678 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerStarted","Data":"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b"} Feb 02 09:22:08 crc kubenswrapper[4747]: I0202 09:22:08.517382 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6jjbp" podStartSLOduration=2.12104479 podStartE2EDuration="3.517360283s" podCreationTimestamp="2026-02-02 09:22:05 +0000 UTC" firstStartedPulling="2026-02-02 09:22:06.461600299 +0000 UTC m=+1539.005938732" lastFinishedPulling="2026-02-02 09:22:07.857915772 +0000 UTC m=+1540.402254225" observedRunningTime="2026-02-02 09:22:08.505256475 +0000 UTC m=+1541.049594918" watchObservedRunningTime="2026-02-02 09:22:08.517360283 +0000 UTC m=+1541.061698726" Feb 02 09:22:09 crc kubenswrapper[4747]: I0202 09:22:09.029586 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-kcg7d"] Feb 02 09:22:09 crc kubenswrapper[4747]: I0202 09:22:09.038914 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-kcg7d"] Feb 02 09:22:09 crc kubenswrapper[4747]: I0202 09:22:09.494660 4747 generic.go:334] "Generic (PLEG): container finished" podID="464e38ca-20bd-44ab-80de-f991f6bb7909" containerID="fb27b39ec7de631d7dc48a809386c5c9390c7fa5891c582a1b3e7dc441173471" exitCode=0 Feb 02 09:22:09 crc kubenswrapper[4747]: I0202 09:22:09.494705 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" event={"ID":"464e38ca-20bd-44ab-80de-f991f6bb7909","Type":"ContainerDied","Data":"fb27b39ec7de631d7dc48a809386c5c9390c7fa5891c582a1b3e7dc441173471"} Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.025961 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7pc6t"] Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.034725 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7pc6t"] Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.351714 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="2551c83c-96c3-43d9-916d-04bf8bbaf85a" path="/var/lib/kubelet/pods/2551c83c-96c3-43d9-916d-04bf8bbaf85a/volumes" Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.352723 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3486730-26af-4fe1-a379-b28af74eb1e8" path="/var/lib/kubelet/pods/b3486730-26af-4fe1-a379-b28af74eb1e8/volumes" Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.894346 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.946185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory\") pod \"464e38ca-20bd-44ab-80de-f991f6bb7909\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.946276 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam\") pod \"464e38ca-20bd-44ab-80de-f991f6bb7909\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.946337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdl2d\" (UniqueName: \"kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d\") pod \"464e38ca-20bd-44ab-80de-f991f6bb7909\" (UID: \"464e38ca-20bd-44ab-80de-f991f6bb7909\") " Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.953031 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d" (OuterVolumeSpecName: "kube-api-access-gdl2d") pod "464e38ca-20bd-44ab-80de-f991f6bb7909" (UID: "464e38ca-20bd-44ab-80de-f991f6bb7909"). InnerVolumeSpecName "kube-api-access-gdl2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.977893 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "464e38ca-20bd-44ab-80de-f991f6bb7909" (UID: "464e38ca-20bd-44ab-80de-f991f6bb7909"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:10 crc kubenswrapper[4747]: I0202 09:22:10.979221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory" (OuterVolumeSpecName: "inventory") pod "464e38ca-20bd-44ab-80de-f991f6bb7909" (UID: "464e38ca-20bd-44ab-80de-f991f6bb7909"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.051048 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.051108 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/464e38ca-20bd-44ab-80de-f991f6bb7909-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.051128 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdl2d\" (UniqueName: \"kubernetes.io/projected/464e38ca-20bd-44ab-80de-f991f6bb7909-kube-api-access-gdl2d\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.523164 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" event={"ID":"464e38ca-20bd-44ab-80de-f991f6bb7909","Type":"ContainerDied","Data":"27ceca9e462a4c86dfd535ad917949cf218ac46f0f000395ed67626d7c6e2ef3"} Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.523212 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27ceca9e462a4c86dfd535ad917949cf218ac46f0f000395ed67626d7c6e2ef3" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.523270 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.602365 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442"] Feb 02 09:22:11 crc kubenswrapper[4747]: E0202 09:22:11.603044 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="464e38ca-20bd-44ab-80de-f991f6bb7909" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.603065 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="464e38ca-20bd-44ab-80de-f991f6bb7909" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.603298 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="464e38ca-20bd-44ab-80de-f991f6bb7909" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.604052 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.607320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.607479 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.607527 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.609381 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.618887 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442"] Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.660864 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpxt2\" (UniqueName: \"kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.661108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.661175 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.763302 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.763380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.764013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpxt2\" (UniqueName: \"kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2\") pod 
\"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.770583 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.773140 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.781106 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpxt2\" (UniqueName: \"kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-w5442\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:11 crc kubenswrapper[4747]: I0202 09:22:11.940259 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.516996 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442"] Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.535395 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" event={"ID":"5eba9605-8814-46f8-be9d-5a931e56c782","Type":"ContainerStarted","Data":"699a11f92ddd4d1cf93aea70a9d05260e2b11b0bbaf450bea52052d89681da6e"} Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.684891 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.690131 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.728024 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.787046 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.787152 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvsx7\" (UniqueName: \"kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.787279 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.889248 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvsx7\" (UniqueName: \"kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.889343 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.889439 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.890097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.890605 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:12 crc kubenswrapper[4747]: I0202 09:22:12.909851 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cvsx7\" (UniqueName: \"kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7\") pod \"community-operators-fg8wh\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:13 crc kubenswrapper[4747]: I0202 09:22:13.016150 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:13 crc kubenswrapper[4747]: I0202 09:22:13.571814 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" event={"ID":"5eba9605-8814-46f8-be9d-5a931e56c782","Type":"ContainerStarted","Data":"3a627d8666ddf6886cc095f460ea6574d5557b18eb927e83b11ed6139ec7ecb7"} Feb 02 09:22:13 crc kubenswrapper[4747]: I0202 09:22:13.610550 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:13 crc kubenswrapper[4747]: I0202 09:22:13.611902 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" podStartSLOduration=2.08907142 podStartE2EDuration="2.611882255s" podCreationTimestamp="2026-02-02 09:22:11 +0000 UTC" firstStartedPulling="2026-02-02 09:22:12.522425991 +0000 UTC m=+1545.066764424" lastFinishedPulling="2026-02-02 09:22:13.045236826 +0000 UTC m=+1545.589575259" observedRunningTime="2026-02-02 09:22:13.592473052 +0000 UTC m=+1546.136811485" watchObservedRunningTime="2026-02-02 09:22:13.611882255 +0000 UTC m=+1546.156220688" Feb 02 09:22:14 crc kubenswrapper[4747]: I0202 09:22:14.583155 4747 generic.go:334] "Generic (PLEG): container finished" podID="1d219127-a71e-454c-826e-8e135a55b644" containerID="5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37" exitCode=0 Feb 02 09:22:14 crc kubenswrapper[4747]: I0202 09:22:14.583225 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerDied","Data":"5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37"} Feb 02 09:22:14 crc kubenswrapper[4747]: I0202 09:22:14.583300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerStarted","Data":"372476b26497863a4d24cecb00c95751d652b453861f35a78796273828f8aadc"} Feb 02 09:22:15 crc kubenswrapper[4747]: I0202 09:22:15.404659 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:15 crc kubenswrapper[4747]: I0202 09:22:15.405255 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:15 crc kubenswrapper[4747]: I0202 09:22:15.468763 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:15 crc kubenswrapper[4747]: I0202 09:22:15.595173 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerStarted","Data":"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17"} Feb 02 09:22:15 crc kubenswrapper[4747]: I0202 09:22:15.646667 4747 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:16 crc kubenswrapper[4747]: I0202 09:22:16.607914 4747 generic.go:334] "Generic (PLEG): container finished" podID="1d219127-a71e-454c-826e-8e135a55b644" containerID="59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17" exitCode=0 Feb 02 09:22:16 crc kubenswrapper[4747]: I0202 09:22:16.608082 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerDied","Data":"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17"} Feb 02 09:22:17 crc kubenswrapper[4747]: I0202 09:22:17.619241 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerStarted","Data":"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18"} Feb 02 09:22:17 crc kubenswrapper[4747]: I0202 09:22:17.672504 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fg8wh" podStartSLOduration=3.232553388 podStartE2EDuration="5.672485357s" podCreationTimestamp="2026-02-02 09:22:12 +0000 UTC" firstStartedPulling="2026-02-02 09:22:14.585185984 +0000 UTC m=+1547.129524427" lastFinishedPulling="2026-02-02 09:22:17.025117943 +0000 UTC m=+1549.569456396" observedRunningTime="2026-02-02 09:22:17.666211258 +0000 UTC m=+1550.210549691" watchObservedRunningTime="2026-02-02 09:22:17.672485357 +0000 UTC m=+1550.216823790" Feb 02 09:22:17 crc kubenswrapper[4747]: I0202 09:22:17.865676 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:17 crc kubenswrapper[4747]: I0202 09:22:17.866185 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6jjbp" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="registry-server" containerID="cri-o://bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b" gracePeriod=2 Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.043571 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tmgr2"] Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.054874 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tmgr2"] Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.322394 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.365201 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73275940-43ea-40ea-ba5f-5b8e25a35f62" path="/var/lib/kubelet/pods/73275940-43ea-40ea-ba5f-5b8e25a35f62/volumes" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.387261 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities\") pod \"7a0e940a-d4c5-4606-ad55-97c577a3465f\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.387330 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ncq2\" (UniqueName: \"kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2\") pod \"7a0e940a-d4c5-4606-ad55-97c577a3465f\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.388122 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities" (OuterVolumeSpecName: "utilities") pod "7a0e940a-d4c5-4606-ad55-97c577a3465f" (UID: "7a0e940a-d4c5-4606-ad55-97c577a3465f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.394177 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2" (OuterVolumeSpecName: "kube-api-access-4ncq2") pod "7a0e940a-d4c5-4606-ad55-97c577a3465f" (UID: "7a0e940a-d4c5-4606-ad55-97c577a3465f"). InnerVolumeSpecName "kube-api-access-4ncq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.488739 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content\") pod \"7a0e940a-d4c5-4606-ad55-97c577a3465f\" (UID: \"7a0e940a-d4c5-4606-ad55-97c577a3465f\") " Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.489257 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.489284 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ncq2\" (UniqueName: \"kubernetes.io/projected/7a0e940a-d4c5-4606-ad55-97c577a3465f-kube-api-access-4ncq2\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.510774 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a0e940a-d4c5-4606-ad55-97c577a3465f" (UID: "7a0e940a-d4c5-4606-ad55-97c577a3465f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.590368 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a0e940a-d4c5-4606-ad55-97c577a3465f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.631734 4747 generic.go:334] "Generic (PLEG): container finished" podID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerID="bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b" exitCode=0 Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.631824 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6jjbp" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.631890 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerDied","Data":"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b"} Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.631960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6jjbp" event={"ID":"7a0e940a-d4c5-4606-ad55-97c577a3465f","Type":"ContainerDied","Data":"367fd1e023e25450be1d16e90b5ce8c6a3bf72e4fce64bc2d30c80b786e75800"} Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.631994 4747 scope.go:117] "RemoveContainer" containerID="bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.668150 4747 scope.go:117] "RemoveContainer" containerID="ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.674301 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.682586 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6jjbp"] Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.691254 4747 scope.go:117] "RemoveContainer" containerID="b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.728495 4747 scope.go:117] "RemoveContainer" containerID="bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b" Feb 02 09:22:18 crc kubenswrapper[4747]: E0202 09:22:18.728997 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b\": container with ID starting with bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b not found: ID does not exist" containerID="bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.729036 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b"} err="failed to get container status \"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b\": rpc error: code = NotFound desc = could not find container \"bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b\": container with ID starting with bc1ac8cd20670a71ffcac55665219910c12dcb944b5748a66cdb656bdc7f640b not found: ID does not exist" Feb 02 09:22:18 
crc kubenswrapper[4747]: I0202 09:22:18.729082 4747 scope.go:117] "RemoveContainer" containerID="ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972" Feb 02 09:22:18 crc kubenswrapper[4747]: E0202 09:22:18.729603 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972\": container with ID starting with ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972 not found: ID does not exist" containerID="ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.729624 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972"} err="failed to get container status \"ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972\": rpc error: code = NotFound desc = could not find container \"ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972\": container with ID starting with ce668c5cefa457fd7e91fe1c6ed7955fc6497897214308af2a875ac0674c3972 not found: ID does not exist" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.729637 4747 scope.go:117] "RemoveContainer" containerID="b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f" Feb 02 09:22:18 crc kubenswrapper[4747]: E0202 09:22:18.730462 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f\": container with ID starting with b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f not found: ID does not exist" containerID="b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f" Feb 02 09:22:18 crc kubenswrapper[4747]: I0202 09:22:18.730489 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f"} err="failed to get container status \"b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f\": rpc error: code = NotFound desc = could not find container \"b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f\": container with ID starting with b5fbc36bfa37ece81fc87155c5c665fbb1ba49eb6ddb76a4059e91b6b13fab0f not found: ID does not exist" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.349882 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" path="/var/lib/kubelet/pods/7a0e940a-d4c5-4606-ad55-97c577a3465f/volumes" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.518133 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.518191 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.518232 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.518903 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.518979 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" gracePeriod=600 Feb 02 09:22:20 crc kubenswrapper[4747]: E0202 09:22:20.636680 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.654913 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" exitCode=0 Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.654995 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5"} Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.655050 4747 scope.go:117] "RemoveContainer" containerID="b5d94ad72d0ad56a4be3ea7e313a497520093895050d2bae5908fcce140bb8a3" Feb 02 09:22:20 crc kubenswrapper[4747]: I0202 09:22:20.656266 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:22:20 crc kubenswrapper[4747]: E0202 09:22:20.656764 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:22:23 crc kubenswrapper[4747]: I0202 09:22:23.016878 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:23 crc kubenswrapper[4747]: I0202 09:22:23.018551 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:23 crc kubenswrapper[4747]: I0202 09:22:23.063919 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:23 crc kubenswrapper[4747]: I0202 09:22:23.733419 4747 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:23 crc kubenswrapper[4747]: I0202 09:22:23.780041 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:25 crc kubenswrapper[4747]: I0202 09:22:25.705338 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fg8wh" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="registry-server" containerID="cri-o://9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18" gracePeriod=2 Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.244051 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.335814 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities\") pod \"1d219127-a71e-454c-826e-8e135a55b644\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.335886 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvsx7\" (UniqueName: \"kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7\") pod \"1d219127-a71e-454c-826e-8e135a55b644\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.335978 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content\") pod \"1d219127-a71e-454c-826e-8e135a55b644\" (UID: \"1d219127-a71e-454c-826e-8e135a55b644\") " Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.337655 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities" (OuterVolumeSpecName: "utilities") pod "1d219127-a71e-454c-826e-8e135a55b644" (UID: "1d219127-a71e-454c-826e-8e135a55b644"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.348382 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7" (OuterVolumeSpecName: "kube-api-access-cvsx7") pod "1d219127-a71e-454c-826e-8e135a55b644" (UID: "1d219127-a71e-454c-826e-8e135a55b644"). InnerVolumeSpecName "kube-api-access-cvsx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.394456 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d219127-a71e-454c-826e-8e135a55b644" (UID: "1d219127-a71e-454c-826e-8e135a55b644"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.438982 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.439050 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvsx7\" (UniqueName: \"kubernetes.io/projected/1d219127-a71e-454c-826e-8e135a55b644-kube-api-access-cvsx7\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.439064 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d219127-a71e-454c-826e-8e135a55b644-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.716230 4747 generic.go:334] "Generic (PLEG): container finished" podID="1d219127-a71e-454c-826e-8e135a55b644" containerID="9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18" exitCode=0 Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.716291 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerDied","Data":"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18"} Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.716323 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fg8wh" event={"ID":"1d219127-a71e-454c-826e-8e135a55b644","Type":"ContainerDied","Data":"372476b26497863a4d24cecb00c95751d652b453861f35a78796273828f8aadc"} Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.716348 4747 scope.go:117] "RemoveContainer" containerID="9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.717401 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fg8wh" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.745609 4747 scope.go:117] "RemoveContainer" containerID="59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.762983 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.770416 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fg8wh"] Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.776546 4747 scope.go:117] "RemoveContainer" containerID="5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.815701 4747 scope.go:117] "RemoveContainer" containerID="9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18" Feb 02 09:22:26 crc kubenswrapper[4747]: E0202 09:22:26.816292 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18\": container with ID starting with 9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18 not found: ID does not exist" containerID="9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.816330 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18"} err="failed to get container status \"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18\": rpc error: code = NotFound desc = could not find container \"9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18\": container with ID starting with 9eea740d65ed89fca03529e223ad2accf072529768218696929bbc0415237f18 not found: ID does not exist" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.816353 4747 scope.go:117] "RemoveContainer" containerID="59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17" Feb 02 09:22:26 crc kubenswrapper[4747]: E0202 09:22:26.816692 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17\": container with ID starting with 59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17 not found: ID does not exist" containerID="59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.816746 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17"} err="failed to get container status \"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17\": rpc error: code = NotFound desc = could not find container \"59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17\": container with ID starting with 59050628495d25b73ac7bb47829563387190f8332e3a34318ff5c11050bfad17 not found: ID does not exist" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.816760 4747 scope.go:117] "RemoveContainer" containerID="5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37" Feb 02 09:22:26 crc kubenswrapper[4747]: E0202 09:22:26.817278 4747 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37\": container with ID starting with 5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37 not found: ID does not exist" containerID="5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37" Feb 02 09:22:26 crc kubenswrapper[4747]: I0202 09:22:26.817308 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37"} err="failed to get container status \"5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37\": rpc error: code = NotFound desc = could not find container \"5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37\": container with ID starting with 5bdbee9e83d353797690a0e3ff4ee56d6f20c28906e51b515855a8529122da37 not found: ID does not exist" Feb 02 09:22:28 crc kubenswrapper[4747]: I0202 09:22:28.026658 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sh4ft"] Feb 02 09:22:28 crc kubenswrapper[4747]: I0202 09:22:28.035221 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-sh4ft"] Feb 02 09:22:28 crc kubenswrapper[4747]: I0202 09:22:28.354118 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11768358-bd3c-440c-ac71-1c1ad4436571" path="/var/lib/kubelet/pods/11768358-bd3c-440c-ac71-1c1ad4436571/volumes" Feb 02 09:22:28 crc kubenswrapper[4747]: I0202 09:22:28.354831 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d219127-a71e-454c-826e-8e135a55b644" path="/var/lib/kubelet/pods/1d219127-a71e-454c-826e-8e135a55b644/volumes" Feb 02 09:22:35 crc kubenswrapper[4747]: I0202 09:22:35.339917 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:22:35 crc kubenswrapper[4747]: E0202 09:22:35.340746 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.698683 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699478 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699494 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699513 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="extract-utilities" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699543 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="extract-utilities" Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699553 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="extract-content" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699560 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="extract-content" Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699591 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699599 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699615 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="extract-utilities" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699623 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="extract-utilities" Feb 02 09:22:38 crc kubenswrapper[4747]: E0202 09:22:38.699636 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="extract-content" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699643 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="extract-content" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699860 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a0e940a-d4c5-4606-ad55-97c577a3465f" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.699884 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d219127-a71e-454c-826e-8e135a55b644" containerName="registry-server" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.701503 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.716530 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.879699 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.879900 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvw9s\" (UniqueName: \"kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.880025 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.982057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.982572 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvw9s\" (UniqueName: \"kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.982818 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.982580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:38 crc kubenswrapper[4747]: I0202 09:22:38.983184 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.006918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vvw9s\" (UniqueName: \"kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s\") pod \"redhat-operators-pbh8r\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.022589 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.498701 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.823565 4747 generic.go:334] "Generic (PLEG): container finished" podID="af609998-77ba-4113-935d-3eaf39d62332" containerID="3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383" exitCode=0 Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.823627 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerDied","Data":"3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383"} Feb 02 09:22:39 crc kubenswrapper[4747]: I0202 09:22:39.823682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerStarted","Data":"c5d632949ed116e419a61562e5726507c0f70a174d1ca535cbafbc151e54245d"} Feb 02 09:22:40 crc kubenswrapper[4747]: I0202 09:22:40.833583 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerStarted","Data":"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35"} Feb 02 09:22:41 crc kubenswrapper[4747]: I0202 09:22:41.843244 4747 generic.go:334] "Generic (PLEG): container finished" podID="af609998-77ba-4113-935d-3eaf39d62332" containerID="260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35" exitCode=0 Feb 02 09:22:41 crc kubenswrapper[4747]: I0202 09:22:41.843314 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerDied","Data":"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35"} Feb 02 09:22:42 crc kubenswrapper[4747]: I0202 09:22:42.854858 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerStarted","Data":"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7"} Feb 02 09:22:42 crc kubenswrapper[4747]: I0202 09:22:42.885587 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pbh8r" podStartSLOduration=2.489302076 podStartE2EDuration="4.885565844s" podCreationTimestamp="2026-02-02 09:22:38 +0000 UTC" firstStartedPulling="2026-02-02 09:22:39.825534034 +0000 UTC m=+1572.369872467" lastFinishedPulling="2026-02-02 09:22:42.221797802 +0000 UTC m=+1574.766136235" observedRunningTime="2026-02-02 09:22:42.882716801 +0000 UTC m=+1575.427055244" watchObservedRunningTime="2026-02-02 09:22:42.885565844 +0000 UTC m=+1575.429904267" Feb 02 09:22:45 crc kubenswrapper[4747]: I0202 09:22:45.883298 4747 generic.go:334] "Generic (PLEG): container finished" podID="5eba9605-8814-46f8-be9d-5a931e56c782" 
containerID="3a627d8666ddf6886cc095f460ea6574d5557b18eb927e83b11ed6139ec7ecb7" exitCode=0 Feb 02 09:22:45 crc kubenswrapper[4747]: I0202 09:22:45.883388 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" event={"ID":"5eba9605-8814-46f8-be9d-5a931e56c782","Type":"ContainerDied","Data":"3a627d8666ddf6886cc095f460ea6574d5557b18eb927e83b11ed6139ec7ecb7"} Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.326088 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.454670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpxt2\" (UniqueName: \"kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2\") pod \"5eba9605-8814-46f8-be9d-5a931e56c782\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.455139 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam\") pod \"5eba9605-8814-46f8-be9d-5a931e56c782\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.455237 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory\") pod \"5eba9605-8814-46f8-be9d-5a931e56c782\" (UID: \"5eba9605-8814-46f8-be9d-5a931e56c782\") " Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.466298 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2" (OuterVolumeSpecName: "kube-api-access-gpxt2") pod "5eba9605-8814-46f8-be9d-5a931e56c782" (UID: "5eba9605-8814-46f8-be9d-5a931e56c782"). InnerVolumeSpecName "kube-api-access-gpxt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.488697 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory" (OuterVolumeSpecName: "inventory") pod "5eba9605-8814-46f8-be9d-5a931e56c782" (UID: "5eba9605-8814-46f8-be9d-5a931e56c782"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.496061 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5eba9605-8814-46f8-be9d-5a931e56c782" (UID: "5eba9605-8814-46f8-be9d-5a931e56c782"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.557827 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.557869 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eba9605-8814-46f8-be9d-5a931e56c782-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.557883 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpxt2\" (UniqueName: \"kubernetes.io/projected/5eba9605-8814-46f8-be9d-5a931e56c782-kube-api-access-gpxt2\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.901836 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" event={"ID":"5eba9605-8814-46f8-be9d-5a931e56c782","Type":"ContainerDied","Data":"699a11f92ddd4d1cf93aea70a9d05260e2b11b0bbaf450bea52052d89681da6e"} Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.901876 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="699a11f92ddd4d1cf93aea70a9d05260e2b11b0bbaf450bea52052d89681da6e" Feb 02 09:22:47 crc kubenswrapper[4747]: I0202 09:22:47.902000 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-w5442" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.023063 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h"] Feb 02 09:22:48 crc kubenswrapper[4747]: E0202 09:22:48.023499 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eba9605-8814-46f8-be9d-5a931e56c782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.023523 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eba9605-8814-46f8-be9d-5a931e56c782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.023776 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eba9605-8814-46f8-be9d-5a931e56c782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.024555 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.027505 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.027959 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.030176 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.033714 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.052455 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h"] Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.068547 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.068662 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.068728 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sp6t\" (UniqueName: \"kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.169998 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.170068 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sp6t\" (UniqueName: \"kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.170160 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.175620 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.176058 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.187005 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sp6t\" (UniqueName: \"kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.342166 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.870235 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h"] Feb 02 09:22:48 crc kubenswrapper[4747]: I0202 09:22:48.911245 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" event={"ID":"4d06633a-c9ea-4ae5-a60e-febcf39cead2","Type":"ContainerStarted","Data":"3b08284824d9da34cb38b405f149235bbed04643b1ff1f98be0aa0d4e18a33da"} Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.023676 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.024025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.076713 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.943303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" event={"ID":"4d06633a-c9ea-4ae5-a60e-febcf39cead2","Type":"ContainerStarted","Data":"a7a8ea06a66cae24e9b2676b73d30fa27246dd2234b17ec810fae58057c68674"} Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.965001 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" podStartSLOduration=2.557198819 podStartE2EDuration="2.964983446s" podCreationTimestamp="2026-02-02 
09:22:47 +0000 UTC" firstStartedPulling="2026-02-02 09:22:48.864481781 +0000 UTC m=+1581.408820214" lastFinishedPulling="2026-02-02 09:22:49.272266408 +0000 UTC m=+1581.816604841" observedRunningTime="2026-02-02 09:22:49.957477175 +0000 UTC m=+1582.501815618" watchObservedRunningTime="2026-02-02 09:22:49.964983446 +0000 UTC m=+1582.509321879" Feb 02 09:22:49 crc kubenswrapper[4747]: I0202 09:22:49.999350 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:50 crc kubenswrapper[4747]: I0202 09:22:50.046634 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:50 crc kubenswrapper[4747]: I0202 09:22:50.339814 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:22:50 crc kubenswrapper[4747]: E0202 09:22:50.340142 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:22:51 crc kubenswrapper[4747]: I0202 09:22:51.956241 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pbh8r" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="registry-server" containerID="cri-o://8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7" gracePeriod=2 Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.432266 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.554928 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities\") pod \"af609998-77ba-4113-935d-3eaf39d62332\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.555306 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvw9s\" (UniqueName: \"kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s\") pod \"af609998-77ba-4113-935d-3eaf39d62332\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.555348 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content\") pod \"af609998-77ba-4113-935d-3eaf39d62332\" (UID: \"af609998-77ba-4113-935d-3eaf39d62332\") " Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.556098 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities" (OuterVolumeSpecName: "utilities") pod "af609998-77ba-4113-935d-3eaf39d62332" (UID: "af609998-77ba-4113-935d-3eaf39d62332"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.565612 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s" (OuterVolumeSpecName: "kube-api-access-vvw9s") pod "af609998-77ba-4113-935d-3eaf39d62332" (UID: "af609998-77ba-4113-935d-3eaf39d62332"). InnerVolumeSpecName "kube-api-access-vvw9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.657119 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.657153 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvw9s\" (UniqueName: \"kubernetes.io/projected/af609998-77ba-4113-935d-3eaf39d62332-kube-api-access-vvw9s\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.674756 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af609998-77ba-4113-935d-3eaf39d62332" (UID: "af609998-77ba-4113-935d-3eaf39d62332"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.762194 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af609998-77ba-4113-935d-3eaf39d62332-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.966472 4747 generic.go:334] "Generic (PLEG): container finished" podID="af609998-77ba-4113-935d-3eaf39d62332" containerID="8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7" exitCode=0 Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.966517 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pbh8r" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.966520 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerDied","Data":"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7"} Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.966638 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbh8r" event={"ID":"af609998-77ba-4113-935d-3eaf39d62332","Type":"ContainerDied","Data":"c5d632949ed116e419a61562e5726507c0f70a174d1ca535cbafbc151e54245d"} Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.966665 4747 scope.go:117] "RemoveContainer" containerID="8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7" Feb 02 09:22:52 crc kubenswrapper[4747]: I0202 09:22:52.998320 4747 scope.go:117] "RemoveContainer" containerID="260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.002252 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.013949 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pbh8r"] Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.021993 4747 scope.go:117] "RemoveContainer" containerID="3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.062549 4747 scope.go:117] "RemoveContainer" containerID="8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7" Feb 02 09:22:53 crc kubenswrapper[4747]: E0202 09:22:53.062995 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7\": container with ID starting with 8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7 not found: ID does not exist" containerID="8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.063034 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7"} err="failed to get container status \"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7\": rpc error: code = NotFound desc = could not find container \"8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7\": container with ID starting with 8d9442019ef4d9f93fb5fea975de5453f3723907fdded86dbcf6f4c612f75cd7 not found: ID does not exist" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.063054 4747 scope.go:117] "RemoveContainer" containerID="260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35" Feb 02 09:22:53 crc kubenswrapper[4747]: E0202 09:22:53.063442 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35\": container with ID starting with 260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35 not found: ID does not exist" containerID="260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.063478 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35"} err="failed to get container status \"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35\": rpc error: code = NotFound desc = could not find container \"260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35\": container with ID starting with 260196c1863ac88ea305603faff52afdf96f451ab74a09977e7b6d33d0aaac35 not found: ID does not exist" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.063496 4747 scope.go:117] "RemoveContainer" containerID="3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383" Feb 02 09:22:53 crc kubenswrapper[4747]: E0202 09:22:53.063827 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383\": container with ID starting with 3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383 not found: ID does not exist" containerID="3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383" Feb 02 09:22:53 crc kubenswrapper[4747]: I0202 09:22:53.063852 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383"} err="failed to get container status \"3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383\": rpc error: code = NotFound desc = could not find container \"3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383\": container with ID starting with 3759b001da5f4af13eea29cfb8289e864904c323d8182d1b4c095ddb40405383 not found: ID does not exist" Feb 02 09:22:54 crc kubenswrapper[4747]: I0202 09:22:54.351961 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af609998-77ba-4113-935d-3eaf39d62332" path="/var/lib/kubelet/pods/af609998-77ba-4113-935d-3eaf39d62332/volumes" Feb 02 09:23:03 crc kubenswrapper[4747]: I0202 09:23:03.339331 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:23:03 crc kubenswrapper[4747]: E0202 09:23:03.340280 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.043302 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-lpmrk"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.053053 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wm4f5"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.063180 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-9429-account-create-update-26wsc"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.073665 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-q4q4t"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.083628 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-be8f-account-create-update-62ffh"] Feb 02 09:23:05 crc 
kubenswrapper[4747]: I0202 09:23:05.092844 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-lpmrk"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.101115 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wm4f5"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.108741 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-be8f-account-create-update-62ffh"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.119216 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-9429-account-create-update-26wsc"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.126740 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-dd8a-account-create-update-5dpcd"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.135283 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-q4q4t"] Feb 02 09:23:05 crc kubenswrapper[4747]: I0202 09:23:05.143384 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-dd8a-account-create-update-5dpcd"] Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.351178 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f897693-3330-45c3-8c0b-d0fff9970b4b" path="/var/lib/kubelet/pods/0f897693-3330-45c3-8c0b-d0fff9970b4b/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.352015 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="117bb1fe-06bc-4df1-82b2-901af8bb8287" path="/var/lib/kubelet/pods/117bb1fe-06bc-4df1-82b2-901af8bb8287/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.352566 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="233dd940-ae1f-48d9-acee-ba069d7a93fb" path="/var/lib/kubelet/pods/233dd940-ae1f-48d9-acee-ba069d7a93fb/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.353093 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77fb1b7e-19e7-4d04-afc7-b55138b71d95" path="/var/lib/kubelet/pods/77fb1b7e-19e7-4d04-afc7-b55138b71d95/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.354078 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9205288-ed77-41ad-8feb-a3ddbb0646ac" path="/var/lib/kubelet/pods/a9205288-ed77-41ad-8feb-a3ddbb0646ac/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.354650 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa" path="/var/lib/kubelet/pods/ff2bf109-c791-4fcb-a4ff-d6ccfed7e4aa/volumes" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.863093 4747 scope.go:117] "RemoveContainer" containerID="6b01ac03b256a3515e17f65bb4cff1287ab3eddb5bc5d11649adef74d9503171" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.886595 4747 scope.go:117] "RemoveContainer" containerID="5e7f6ea423e81d9a05cfb8f9a49014a60051694136eb02e7aae65a77a804dc38" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.943681 4747 scope.go:117] "RemoveContainer" containerID="7d13ad68862e10421a053d1361ac149bd392fbc361043b481fc12bf271dd4737" Feb 02 09:23:06 crc kubenswrapper[4747]: I0202 09:23:06.986627 4747 scope.go:117] "RemoveContainer" containerID="f2375400f72549bab36ff9dba42c1e3614064d8ee53e69aa1445bdca98966476" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.032694 4747 scope.go:117] "RemoveContainer" 
containerID="f61d25e01ba47aff67e10bfa98efbe79fcc663e4dd181b3cc89ee20e7462b889" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.100970 4747 scope.go:117] "RemoveContainer" containerID="14c87cc8611852653ad444035133d8c121e223ed2527e2609e76c6506b6b30e0" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.122200 4747 scope.go:117] "RemoveContainer" containerID="73e7d55ef5533157a939da1439e9ccee720d4a1de5cfb2b5cfdbf9d1d547ba47" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.156921 4747 scope.go:117] "RemoveContainer" containerID="0476a2c507e535ce5fafcb4d21bb793557dd7ef2ffd877c1bf92b3519d343ead" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.172909 4747 scope.go:117] "RemoveContainer" containerID="67bef59bf70c65c9d9b3820186fcb391a2c5d4ea690f558d792d64c05cea4ad7" Feb 02 09:23:07 crc kubenswrapper[4747]: I0202 09:23:07.197574 4747 scope.go:117] "RemoveContainer" containerID="91ac5a4e6796a03cba0420ab6f710cb5e6874f93e48f8aa67d401e422e4272c4" Feb 02 09:23:16 crc kubenswrapper[4747]: I0202 09:23:16.339309 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:23:16 crc kubenswrapper[4747]: E0202 09:23:16.340133 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:23:29 crc kubenswrapper[4747]: I0202 09:23:29.339876 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:23:29 crc kubenswrapper[4747]: E0202 09:23:29.340598 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:23:30 crc kubenswrapper[4747]: I0202 09:23:30.043515 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rvtdm"] Feb 02 09:23:30 crc kubenswrapper[4747]: I0202 09:23:30.051516 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rvtdm"] Feb 02 09:23:30 crc kubenswrapper[4747]: I0202 09:23:30.350220 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="742b6645-afca-42df-9aad-6b6e1e93790b" path="/var/lib/kubelet/pods/742b6645-afca-42df-9aad-6b6e1e93790b/volumes" Feb 02 09:23:34 crc kubenswrapper[4747]: I0202 09:23:34.350750 4747 generic.go:334] "Generic (PLEG): container finished" podID="4d06633a-c9ea-4ae5-a60e-febcf39cead2" containerID="a7a8ea06a66cae24e9b2676b73d30fa27246dd2234b17ec810fae58057c68674" exitCode=0 Feb 02 09:23:34 crc kubenswrapper[4747]: I0202 09:23:34.350889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" event={"ID":"4d06633a-c9ea-4ae5-a60e-febcf39cead2","Type":"ContainerDied","Data":"a7a8ea06a66cae24e9b2676b73d30fa27246dd2234b17ec810fae58057c68674"} Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 
09:23:35.821729 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.951789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory\") pod \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.951898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sp6t\" (UniqueName: \"kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t\") pod \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.952081 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam\") pod \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\" (UID: \"4d06633a-c9ea-4ae5-a60e-febcf39cead2\") " Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.962195 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t" (OuterVolumeSpecName: "kube-api-access-4sp6t") pod "4d06633a-c9ea-4ae5-a60e-febcf39cead2" (UID: "4d06633a-c9ea-4ae5-a60e-febcf39cead2"). InnerVolumeSpecName "kube-api-access-4sp6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.980433 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory" (OuterVolumeSpecName: "inventory") pod "4d06633a-c9ea-4ae5-a60e-febcf39cead2" (UID: "4d06633a-c9ea-4ae5-a60e-febcf39cead2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:35 crc kubenswrapper[4747]: I0202 09:23:35.992262 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "4d06633a-c9ea-4ae5-a60e-febcf39cead2" (UID: "4d06633a-c9ea-4ae5-a60e-febcf39cead2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.054806 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.054868 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d06633a-c9ea-4ae5-a60e-febcf39cead2-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.054880 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sp6t\" (UniqueName: \"kubernetes.io/projected/4d06633a-c9ea-4ae5-a60e-febcf39cead2-kube-api-access-4sp6t\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.371160 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" event={"ID":"4d06633a-c9ea-4ae5-a60e-febcf39cead2","Type":"ContainerDied","Data":"3b08284824d9da34cb38b405f149235bbed04643b1ff1f98be0aa0d4e18a33da"} Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.371215 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b08284824d9da34cb38b405f149235bbed04643b1ff1f98be0aa0d4e18a33da" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.371227 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.472755 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-72dfk"] Feb 02 09:23:36 crc kubenswrapper[4747]: E0202 09:23:36.473291 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="extract-utilities" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473317 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="extract-utilities" Feb 02 09:23:36 crc kubenswrapper[4747]: E0202 09:23:36.473334 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d06633a-c9ea-4ae5-a60e-febcf39cead2" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473345 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d06633a-c9ea-4ae5-a60e-febcf39cead2" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:36 crc kubenswrapper[4747]: E0202 09:23:36.473368 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="registry-server" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473376 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="registry-server" Feb 02 09:23:36 crc kubenswrapper[4747]: E0202 09:23:36.473395 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="extract-content" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473403 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="extract-content" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473633 4747 
memory_manager.go:354] "RemoveStaleState removing state" podUID="af609998-77ba-4113-935d-3eaf39d62332" containerName="registry-server" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.473657 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d06633a-c9ea-4ae5-a60e-febcf39cead2" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.474437 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.478549 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.478830 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.478922 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.479085 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.485501 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-72dfk"] Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.565557 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.565637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.565669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knlr9\" (UniqueName: \"kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.668027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.668688 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " 
pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.668739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knlr9\" (UniqueName: \"kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.674375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.677051 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.690203 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knlr9\" (UniqueName: \"kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9\") pod \"ssh-known-hosts-edpm-deployment-72dfk\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:36 crc kubenswrapper[4747]: I0202 09:23:36.796209 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:37 crc kubenswrapper[4747]: I0202 09:23:37.303136 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-72dfk"] Feb 02 09:23:37 crc kubenswrapper[4747]: I0202 09:23:37.379603 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" event={"ID":"1e97ae5d-7c9d-423b-8fbb-c00644f23335","Type":"ContainerStarted","Data":"f2c43edbd7acbeb4e322f3a559f94cdaee2029625c1841dfdbc7d6813fa26f18"} Feb 02 09:23:38 crc kubenswrapper[4747]: I0202 09:23:38.396016 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" event={"ID":"1e97ae5d-7c9d-423b-8fbb-c00644f23335","Type":"ContainerStarted","Data":"e761d01ed699cba81f0577e1d34e52d368fc9227de08ac3bfb8650bfb874bb45"} Feb 02 09:23:38 crc kubenswrapper[4747]: I0202 09:23:38.424669 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" podStartSLOduration=1.78078367 podStartE2EDuration="2.424649721s" podCreationTimestamp="2026-02-02 09:23:36 +0000 UTC" firstStartedPulling="2026-02-02 09:23:37.29361504 +0000 UTC m=+1629.837953473" lastFinishedPulling="2026-02-02 09:23:37.937481101 +0000 UTC m=+1630.481819524" observedRunningTime="2026-02-02 09:23:38.415209561 +0000 UTC m=+1630.959548014" watchObservedRunningTime="2026-02-02 09:23:38.424649721 +0000 UTC m=+1630.968988154" Feb 02 09:23:44 crc kubenswrapper[4747]: I0202 09:23:44.341001 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:23:44 crc kubenswrapper[4747]: E0202 09:23:44.342370 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:23:44 crc kubenswrapper[4747]: I0202 09:23:44.450969 4747 generic.go:334] "Generic (PLEG): container finished" podID="1e97ae5d-7c9d-423b-8fbb-c00644f23335" containerID="e761d01ed699cba81f0577e1d34e52d368fc9227de08ac3bfb8650bfb874bb45" exitCode=0 Feb 02 09:23:44 crc kubenswrapper[4747]: I0202 09:23:44.451012 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" event={"ID":"1e97ae5d-7c9d-423b-8fbb-c00644f23335","Type":"ContainerDied","Data":"e761d01ed699cba81f0577e1d34e52d368fc9227de08ac3bfb8650bfb874bb45"} Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.841255 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.942517 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0\") pod \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.942707 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam\") pod \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.942874 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knlr9\" (UniqueName: \"kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9\") pod \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\" (UID: \"1e97ae5d-7c9d-423b-8fbb-c00644f23335\") " Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.947899 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9" (OuterVolumeSpecName: "kube-api-access-knlr9") pod "1e97ae5d-7c9d-423b-8fbb-c00644f23335" (UID: "1e97ae5d-7c9d-423b-8fbb-c00644f23335"). InnerVolumeSpecName "kube-api-access-knlr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.967767 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1e97ae5d-7c9d-423b-8fbb-c00644f23335" (UID: "1e97ae5d-7c9d-423b-8fbb-c00644f23335"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:45 crc kubenswrapper[4747]: I0202 09:23:45.969747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1e97ae5d-7c9d-423b-8fbb-c00644f23335" (UID: "1e97ae5d-7c9d-423b-8fbb-c00644f23335"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.045045 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knlr9\" (UniqueName: \"kubernetes.io/projected/1e97ae5d-7c9d-423b-8fbb-c00644f23335-kube-api-access-knlr9\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.045348 4747 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.045364 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1e97ae5d-7c9d-423b-8fbb-c00644f23335-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.477410 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" event={"ID":"1e97ae5d-7c9d-423b-8fbb-c00644f23335","Type":"ContainerDied","Data":"f2c43edbd7acbeb4e322f3a559f94cdaee2029625c1841dfdbc7d6813fa26f18"} Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.477474 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2c43edbd7acbeb4e322f3a559f94cdaee2029625c1841dfdbc7d6813fa26f18" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.477712 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-72dfk" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.552441 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf"] Feb 02 09:23:46 crc kubenswrapper[4747]: E0202 09:23:46.552768 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e97ae5d-7c9d-423b-8fbb-c00644f23335" containerName="ssh-known-hosts-edpm-deployment" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.552784 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e97ae5d-7c9d-423b-8fbb-c00644f23335" containerName="ssh-known-hosts-edpm-deployment" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.555099 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e97ae5d-7c9d-423b-8fbb-c00644f23335" containerName="ssh-known-hosts-edpm-deployment" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.555734 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.563986 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.565108 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.565393 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.565748 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.578017 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf"] Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.658689 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpm8s\" (UniqueName: \"kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.658875 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.658918 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.760875 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.760967 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.761045 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpm8s\" (UniqueName: \"kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s\") pod 
\"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.767697 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.768770 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.779778 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpm8s\" (UniqueName: \"kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmfbf\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:46 crc kubenswrapper[4747]: I0202 09:23:46.878895 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:47 crc kubenswrapper[4747]: I0202 09:23:47.354589 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf"] Feb 02 09:23:47 crc kubenswrapper[4747]: W0202 09:23:47.356336 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17233d25_c081_446c_a7a9_2967a227c731.slice/crio-bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af WatchSource:0}: Error finding container bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af: Status 404 returned error can't find the container with id bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af Feb 02 09:23:47 crc kubenswrapper[4747]: I0202 09:23:47.485144 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" event={"ID":"17233d25-c081-446c-a7a9-2967a227c731","Type":"ContainerStarted","Data":"bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af"} Feb 02 09:23:48 crc kubenswrapper[4747]: I0202 09:23:48.498402 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" event={"ID":"17233d25-c081-446c-a7a9-2967a227c731","Type":"ContainerStarted","Data":"9c64276e5dfb24e749bb173e68d6d406de63e12c043538bfd3e71a669040341c"} Feb 02 09:23:48 crc kubenswrapper[4747]: I0202 09:23:48.515590 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" podStartSLOduration=2.070170059 podStartE2EDuration="2.515571097s" podCreationTimestamp="2026-02-02 09:23:46 +0000 UTC" firstStartedPulling="2026-02-02 09:23:47.358721982 +0000 UTC m=+1639.903060415" lastFinishedPulling="2026-02-02 09:23:47.80412301 +0000 UTC m=+1640.348461453" 
observedRunningTime="2026-02-02 09:23:48.51251363 +0000 UTC m=+1641.056852073" watchObservedRunningTime="2026-02-02 09:23:48.515571097 +0000 UTC m=+1641.059909530" Feb 02 09:23:55 crc kubenswrapper[4747]: I0202 09:23:55.553928 4747 generic.go:334] "Generic (PLEG): container finished" podID="17233d25-c081-446c-a7a9-2967a227c731" containerID="9c64276e5dfb24e749bb173e68d6d406de63e12c043538bfd3e71a669040341c" exitCode=0 Feb 02 09:23:55 crc kubenswrapper[4747]: I0202 09:23:55.554001 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" event={"ID":"17233d25-c081-446c-a7a9-2967a227c731","Type":"ContainerDied","Data":"9c64276e5dfb24e749bb173e68d6d406de63e12c043538bfd3e71a669040341c"} Feb 02 09:23:56 crc kubenswrapper[4747]: I0202 09:23:56.339772 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:23:56 crc kubenswrapper[4747]: E0202 09:23:56.340424 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.039276 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.171881 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpm8s\" (UniqueName: \"kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s\") pod \"17233d25-c081-446c-a7a9-2967a227c731\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.172025 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory\") pod \"17233d25-c081-446c-a7a9-2967a227c731\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.172148 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam\") pod \"17233d25-c081-446c-a7a9-2967a227c731\" (UID: \"17233d25-c081-446c-a7a9-2967a227c731\") " Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.180334 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s" (OuterVolumeSpecName: "kube-api-access-fpm8s") pod "17233d25-c081-446c-a7a9-2967a227c731" (UID: "17233d25-c081-446c-a7a9-2967a227c731"). InnerVolumeSpecName "kube-api-access-fpm8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.201890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory" (OuterVolumeSpecName: "inventory") pod "17233d25-c081-446c-a7a9-2967a227c731" (UID: "17233d25-c081-446c-a7a9-2967a227c731"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.220002 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "17233d25-c081-446c-a7a9-2967a227c731" (UID: "17233d25-c081-446c-a7a9-2967a227c731"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.274760 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.274802 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpm8s\" (UniqueName: \"kubernetes.io/projected/17233d25-c081-446c-a7a9-2967a227c731-kube-api-access-fpm8s\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.274816 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17233d25-c081-446c-a7a9-2967a227c731-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.573324 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" event={"ID":"17233d25-c081-446c-a7a9-2967a227c731","Type":"ContainerDied","Data":"bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af"} Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.573707 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc444e3ff27ee3f66cf5dd56b38f46108da6060c8d564ee04c2bdcb81fd992af" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.573439 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmfbf" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.653633 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt"] Feb 02 09:23:57 crc kubenswrapper[4747]: E0202 09:23:57.654056 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17233d25-c081-446c-a7a9-2967a227c731" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.654073 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="17233d25-c081-446c-a7a9-2967a227c731" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.654231 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="17233d25-c081-446c-a7a9-2967a227c731" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.654809 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.656980 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.657079 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.657196 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.657280 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.676321 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt"] Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.687568 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m6d7\" (UniqueName: \"kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.687672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.687755 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.788888 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.789027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.789075 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m6d7\" (UniqueName: \"kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7\") pod 
\"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.794072 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.794693 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.811193 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m6d7\" (UniqueName: \"kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:57 crc kubenswrapper[4747]: I0202 09:23:57.972315 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:23:58 crc kubenswrapper[4747]: I0202 09:23:58.539166 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt"] Feb 02 09:23:58 crc kubenswrapper[4747]: I0202 09:23:58.582205 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" event={"ID":"8cc0ed13-8498-4a1b-9728-47c6accd7128","Type":"ContainerStarted","Data":"1313446e7a4fe81e8cb2402f833c1f40bd99a64b78a16b25e4a094e15bf5915e"} Feb 02 09:23:59 crc kubenswrapper[4747]: I0202 09:23:59.598993 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" event={"ID":"8cc0ed13-8498-4a1b-9728-47c6accd7128","Type":"ContainerStarted","Data":"14d5b396c1dcbb52d70dece2dc118028a573ccfd5fab8c86814415d387686b98"} Feb 02 09:23:59 crc kubenswrapper[4747]: I0202 09:23:59.623507 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" podStartSLOduration=1.841167088 podStartE2EDuration="2.623486686s" podCreationTimestamp="2026-02-02 09:23:57 +0000 UTC" firstStartedPulling="2026-02-02 09:23:58.54215696 +0000 UTC m=+1651.086495393" lastFinishedPulling="2026-02-02 09:23:59.324476558 +0000 UTC m=+1651.868814991" observedRunningTime="2026-02-02 09:23:59.616667873 +0000 UTC m=+1652.161006306" watchObservedRunningTime="2026-02-02 09:23:59.623486686 +0000 UTC m=+1652.167825119" Feb 02 09:24:01 crc kubenswrapper[4747]: I0202 09:24:01.043400 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-qqtqd"] Feb 02 09:24:01 crc kubenswrapper[4747]: I0202 09:24:01.052189 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-qqtqd"] Feb 02 09:24:02 crc 
kubenswrapper[4747]: I0202 09:24:02.353311 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc167b60-b071-4a53-9cac-27a8dd516321" path="/var/lib/kubelet/pods/cc167b60-b071-4a53-9cac-27a8dd516321/volumes" Feb 02 09:24:03 crc kubenswrapper[4747]: I0202 09:24:03.026900 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j5llb"] Feb 02 09:24:03 crc kubenswrapper[4747]: I0202 09:24:03.036072 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-j5llb"] Feb 02 09:24:04 crc kubenswrapper[4747]: I0202 09:24:04.359520 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4675701d-dd13-435f-ada3-53023492cb1e" path="/var/lib/kubelet/pods/4675701d-dd13-435f-ada3-53023492cb1e/volumes" Feb 02 09:24:07 crc kubenswrapper[4747]: I0202 09:24:07.406290 4747 scope.go:117] "RemoveContainer" containerID="b177c0e96f813e1c664183cc01f48afc1955b2119c11e31e031227db4f59e861" Feb 02 09:24:07 crc kubenswrapper[4747]: I0202 09:24:07.450164 4747 scope.go:117] "RemoveContainer" containerID="d684bb3a72710f3d15448c93b8d1b0f0fc98cdda20a2cf9c1c5b4401db9e1592" Feb 02 09:24:07 crc kubenswrapper[4747]: I0202 09:24:07.488978 4747 scope.go:117] "RemoveContainer" containerID="40aba8b534e1c3a42cb1565e2ed91ffcebe9f7b07ef15fa63c9a312e131e2d94" Feb 02 09:24:08 crc kubenswrapper[4747]: I0202 09:24:08.668105 4747 generic.go:334] "Generic (PLEG): container finished" podID="8cc0ed13-8498-4a1b-9728-47c6accd7128" containerID="14d5b396c1dcbb52d70dece2dc118028a573ccfd5fab8c86814415d387686b98" exitCode=0 Feb 02 09:24:08 crc kubenswrapper[4747]: I0202 09:24:08.668411 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" event={"ID":"8cc0ed13-8498-4a1b-9728-47c6accd7128","Type":"ContainerDied","Data":"14d5b396c1dcbb52d70dece2dc118028a573ccfd5fab8c86814415d387686b98"} Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.071700 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.217870 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory\") pod \"8cc0ed13-8498-4a1b-9728-47c6accd7128\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.217963 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m6d7\" (UniqueName: \"kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7\") pod \"8cc0ed13-8498-4a1b-9728-47c6accd7128\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.218034 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam\") pod \"8cc0ed13-8498-4a1b-9728-47c6accd7128\" (UID: \"8cc0ed13-8498-4a1b-9728-47c6accd7128\") " Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.223816 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7" (OuterVolumeSpecName: "kube-api-access-4m6d7") pod "8cc0ed13-8498-4a1b-9728-47c6accd7128" (UID: "8cc0ed13-8498-4a1b-9728-47c6accd7128"). InnerVolumeSpecName "kube-api-access-4m6d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.244742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8cc0ed13-8498-4a1b-9728-47c6accd7128" (UID: "8cc0ed13-8498-4a1b-9728-47c6accd7128"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.255218 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory" (OuterVolumeSpecName: "inventory") pod "8cc0ed13-8498-4a1b-9728-47c6accd7128" (UID: "8cc0ed13-8498-4a1b-9728-47c6accd7128"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.330154 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.330210 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m6d7\" (UniqueName: \"kubernetes.io/projected/8cc0ed13-8498-4a1b-9728-47c6accd7128-kube-api-access-4m6d7\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.330234 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cc0ed13-8498-4a1b-9728-47c6accd7128-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.689190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" event={"ID":"8cc0ed13-8498-4a1b-9728-47c6accd7128","Type":"ContainerDied","Data":"1313446e7a4fe81e8cb2402f833c1f40bd99a64b78a16b25e4a094e15bf5915e"} Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.689466 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1313446e7a4fe81e8cb2402f833c1f40bd99a64b78a16b25e4a094e15bf5915e" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.689532 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.842255 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9"] Feb 02 09:24:10 crc kubenswrapper[4747]: E0202 09:24:10.842661 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc0ed13-8498-4a1b-9728-47c6accd7128" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.842684 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc0ed13-8498-4a1b-9728-47c6accd7128" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.842897 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc0ed13-8498-4a1b-9728-47c6accd7128" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.843536 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.848422 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849183 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849228 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849267 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849294 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849435 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.849710 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.853009 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9"] Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.860173 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993192 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993485 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993615 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f97jj\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993715 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: 
\"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993798 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993882 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.993981 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994093 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994438 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994665 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994764 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:10 crc kubenswrapper[4747]: I0202 09:24:10.994848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.097207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098248 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098386 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098660 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098839 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.098994 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099137 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099267 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099724 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099848 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f97jj\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.099988 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.103592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.104202 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.104275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.104491 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.104888 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 
crc kubenswrapper[4747]: I0202 09:24:11.105005 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.105510 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.105962 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.106276 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.106586 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.108310 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.110591 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.111699 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.121625 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f97jj\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.174596 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.340056 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:24:11 crc kubenswrapper[4747]: E0202 09:24:11.340654 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.661135 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9"] Feb 02 09:24:11 crc kubenswrapper[4747]: W0202 09:24:11.663040 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb2b09c4_09a2_44d5_8232_5b3e25921596.slice/crio-9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb WatchSource:0}: Error finding container 9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb: Status 404 returned error can't find the container with id 9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb Feb 02 09:24:11 crc kubenswrapper[4747]: I0202 09:24:11.697419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" event={"ID":"fb2b09c4-09a2-44d5-8232-5b3e25921596","Type":"ContainerStarted","Data":"9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb"} Feb 02 09:24:12 crc kubenswrapper[4747]: I0202 09:24:12.711338 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" event={"ID":"fb2b09c4-09a2-44d5-8232-5b3e25921596","Type":"ContainerStarted","Data":"fdd31e5e52bdf7b470606b10e27c58e2e5119e726389e055e40d855bd6dfb207"} Feb 02 09:24:12 crc kubenswrapper[4747]: I0202 09:24:12.748166 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" podStartSLOduration=2.321334268 podStartE2EDuration="2.748138643s" podCreationTimestamp="2026-02-02 09:24:10 +0000 UTC" firstStartedPulling="2026-02-02 09:24:11.665460821 +0000 UTC m=+1664.209799254" lastFinishedPulling="2026-02-02 09:24:12.092265196 +0000 UTC m=+1664.636603629" observedRunningTime="2026-02-02 09:24:12.737247886 +0000 UTC m=+1665.281586329" watchObservedRunningTime="2026-02-02 09:24:12.748138643 
+0000 UTC m=+1665.292477086" Feb 02 09:24:23 crc kubenswrapper[4747]: I0202 09:24:23.340246 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:24:23 crc kubenswrapper[4747]: E0202 09:24:23.342329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:24:36 crc kubenswrapper[4747]: I0202 09:24:36.340177 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:24:36 crc kubenswrapper[4747]: E0202 09:24:36.341101 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:24:45 crc kubenswrapper[4747]: I0202 09:24:45.000505 4747 generic.go:334] "Generic (PLEG): container finished" podID="fb2b09c4-09a2-44d5-8232-5b3e25921596" containerID="fdd31e5e52bdf7b470606b10e27c58e2e5119e726389e055e40d855bd6dfb207" exitCode=0 Feb 02 09:24:45 crc kubenswrapper[4747]: I0202 09:24:45.000583 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" event={"ID":"fb2b09c4-09a2-44d5-8232-5b3e25921596","Type":"ContainerDied","Data":"fdd31e5e52bdf7b470606b10e27c58e2e5119e726389e055e40d855bd6dfb207"} Feb 02 09:24:45 crc kubenswrapper[4747]: I0202 09:24:45.051477 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-k5dtm"] Feb 02 09:24:45 crc kubenswrapper[4747]: I0202 09:24:45.060113 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-k5dtm"] Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.351892 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c866e283-1f6a-4eb8-b03b-426ff2d875b9" path="/var/lib/kubelet/pods/c866e283-1f6a-4eb8-b03b-426ff2d875b9/volumes" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.496584 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.608697 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609704 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609791 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609830 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609856 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609877 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.609916 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610009 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610034 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: 
\"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610054 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f97jj\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610090 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610261 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.610288 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle\") pod \"fb2b09c4-09a2-44d5-8232-5b3e25921596\" (UID: \"fb2b09c4-09a2-44d5-8232-5b3e25921596\") " Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.615005 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.615895 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.617385 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.619171 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.621192 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj" (OuterVolumeSpecName: "kube-api-access-f97jj") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "kube-api-access-f97jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.621208 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.621272 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.621279 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.621460 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.622701 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.622865 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.626263 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.644905 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.646357 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory" (OuterVolumeSpecName: "inventory") pod "fb2b09c4-09a2-44d5-8232-5b3e25921596" (UID: "fb2b09c4-09a2-44d5-8232-5b3e25921596"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.712988 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713022 4747 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713039 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713055 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713067 4747 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713079 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713091 4747 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713131 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713145 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713158 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713171 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713183 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f97jj\" (UniqueName: 
\"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-kube-api-access-f97jj\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713195 4747 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb2b09c4-09a2-44d5-8232-5b3e25921596-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:46 crc kubenswrapper[4747]: I0202 09:24:46.713209 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/fb2b09c4-09a2-44d5-8232-5b3e25921596-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.023231 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" event={"ID":"fb2b09c4-09a2-44d5-8232-5b3e25921596","Type":"ContainerDied","Data":"9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb"} Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.023305 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dd97818ac8b27fcfb997cddc596f5dc298c6e9db87d25f8c2cf9b627e25fedb" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.023436 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.142022 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j"] Feb 02 09:24:47 crc kubenswrapper[4747]: E0202 09:24:47.142627 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb2b09c4-09a2-44d5-8232-5b3e25921596" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.142652 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb2b09c4-09a2-44d5-8232-5b3e25921596" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.142877 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb2b09c4-09a2-44d5-8232-5b3e25921596" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.146373 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.150594 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.150590 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.150708 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.150613 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.150594 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.159915 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j"] Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.324424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb26d\" (UniqueName: \"kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.324493 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.324566 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.324625 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.324839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.427392 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-lb26d\" (UniqueName: \"kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.427442 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.427489 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.427518 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.427550 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.428878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.432791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.433281 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.440092 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.452278 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb26d\" (UniqueName: \"kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-6xm8j\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:47 crc kubenswrapper[4747]: I0202 09:24:47.472753 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:24:48 crc kubenswrapper[4747]: I0202 09:24:48.064438 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j"] Feb 02 09:24:49 crc kubenswrapper[4747]: I0202 09:24:49.043218 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" event={"ID":"2c161d1a-b3d3-4679-b869-fd3cafa040c8","Type":"ContainerStarted","Data":"85dc49644408520c18f0c2defcadd86ca83b6b8673b385348ad62d700f2b8250"} Feb 02 09:24:49 crc kubenswrapper[4747]: I0202 09:24:49.043579 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" event={"ID":"2c161d1a-b3d3-4679-b869-fd3cafa040c8","Type":"ContainerStarted","Data":"511389b74f9407397818a40fcf5211e79d9a4000988a5a46b4bca1e0b656c561"} Feb 02 09:24:49 crc kubenswrapper[4747]: I0202 09:24:49.069450 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" podStartSLOduration=1.5412584790000001 podStartE2EDuration="2.06942063s" podCreationTimestamp="2026-02-02 09:24:47 +0000 UTC" firstStartedPulling="2026-02-02 09:24:48.069986154 +0000 UTC m=+1700.614324587" lastFinishedPulling="2026-02-02 09:24:48.598148305 +0000 UTC m=+1701.142486738" observedRunningTime="2026-02-02 09:24:49.056642495 +0000 UTC m=+1701.600980978" watchObservedRunningTime="2026-02-02 09:24:49.06942063 +0000 UTC m=+1701.613759103" Feb 02 09:24:49 crc kubenswrapper[4747]: I0202 09:24:49.339694 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:24:49 crc kubenswrapper[4747]: E0202 09:24:49.340084 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:25:03 crc kubenswrapper[4747]: I0202 09:25:03.340341 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:25:03 crc kubenswrapper[4747]: E0202 09:25:03.341228 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:25:07 crc kubenswrapper[4747]: I0202 09:25:07.603132 4747 scope.go:117] "RemoveContainer" containerID="4def1b4fc92e8fddb94c429cf9d4e4f8696f0ecc10b2388ab77580298e087e57" Feb 02 09:25:15 crc kubenswrapper[4747]: I0202 09:25:15.339669 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:25:15 crc kubenswrapper[4747]: E0202 09:25:15.340604 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:25:30 crc kubenswrapper[4747]: I0202 09:25:30.339596 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:25:30 crc kubenswrapper[4747]: E0202 09:25:30.340464 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:25:42 crc kubenswrapper[4747]: I0202 09:25:42.339991 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:25:42 crc kubenswrapper[4747]: E0202 09:25:42.341042 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:25:43 crc kubenswrapper[4747]: I0202 09:25:43.815399 4747 generic.go:334] "Generic (PLEG): container finished" podID="2c161d1a-b3d3-4679-b869-fd3cafa040c8" containerID="85dc49644408520c18f0c2defcadd86ca83b6b8673b385348ad62d700f2b8250" exitCode=0 Feb 02 09:25:43 crc kubenswrapper[4747]: I0202 09:25:43.815481 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" event={"ID":"2c161d1a-b3d3-4679-b869-fd3cafa040c8","Type":"ContainerDied","Data":"85dc49644408520c18f0c2defcadd86ca83b6b8673b385348ad62d700f2b8250"} Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.220919 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.265178 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory\") pod \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.265262 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle\") pod \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.265389 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0\") pod \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.265423 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam\") pod \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.265464 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb26d\" (UniqueName: \"kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d\") pod \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\" (UID: \"2c161d1a-b3d3-4679-b869-fd3cafa040c8\") " Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.271720 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2c161d1a-b3d3-4679-b869-fd3cafa040c8" (UID: "2c161d1a-b3d3-4679-b869-fd3cafa040c8"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.272340 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d" (OuterVolumeSpecName: "kube-api-access-lb26d") pod "2c161d1a-b3d3-4679-b869-fd3cafa040c8" (UID: "2c161d1a-b3d3-4679-b869-fd3cafa040c8"). InnerVolumeSpecName "kube-api-access-lb26d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.293174 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "2c161d1a-b3d3-4679-b869-fd3cafa040c8" (UID: "2c161d1a-b3d3-4679-b869-fd3cafa040c8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.296564 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2c161d1a-b3d3-4679-b869-fd3cafa040c8" (UID: "2c161d1a-b3d3-4679-b869-fd3cafa040c8"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.304740 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory" (OuterVolumeSpecName: "inventory") pod "2c161d1a-b3d3-4679-b869-fd3cafa040c8" (UID: "2c161d1a-b3d3-4679-b869-fd3cafa040c8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.367857 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.367895 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.367910 4747 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.367922 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2c161d1a-b3d3-4679-b869-fd3cafa040c8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.367937 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb26d\" (UniqueName: \"kubernetes.io/projected/2c161d1a-b3d3-4679-b869-fd3cafa040c8-kube-api-access-lb26d\") on node \"crc\" DevicePath \"\"" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.834322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" event={"ID":"2c161d1a-b3d3-4679-b869-fd3cafa040c8","Type":"ContainerDied","Data":"511389b74f9407397818a40fcf5211e79d9a4000988a5a46b4bca1e0b656c561"} Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.834360 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-6xm8j" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.834369 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="511389b74f9407397818a40fcf5211e79d9a4000988a5a46b4bca1e0b656c561" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.998158 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c"] Feb 02 09:25:45 crc kubenswrapper[4747]: E0202 09:25:45.998601 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c161d1a-b3d3-4679-b869-fd3cafa040c8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.998622 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c161d1a-b3d3-4679-b869-fd3cafa040c8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.998787 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c161d1a-b3d3-4679-b869-fd3cafa040c8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 02 09:25:45 crc kubenswrapper[4747]: I0202 09:25:45.999617 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.005555 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.006030 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.006232 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.006451 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.006619 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.009895 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.013046 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c"] Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079347 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079433 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: 
\"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079627 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx298\" (UniqueName: \"kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079654 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.079679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.181855 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.182013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.182100 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.182166 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx298\" (UniqueName: \"kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.182203 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.182239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.187499 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.187687 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.188263 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.189870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.190078 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.203487 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx298\" (UniqueName: \"kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.344067 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:25:46 crc kubenswrapper[4747]: I0202 09:25:46.911950 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c"] Feb 02 09:25:47 crc kubenswrapper[4747]: I0202 09:25:47.857382 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" event={"ID":"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8","Type":"ContainerStarted","Data":"594adde51405d728ad3951b97b741e22ea40712e0dd190c12914d87c293d5d61"} Feb 02 09:25:47 crc kubenswrapper[4747]: I0202 09:25:47.857752 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" event={"ID":"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8","Type":"ContainerStarted","Data":"5222d22376024a1a0a762900ed8f07f88efbc656ed5212bac1863865ae159996"} Feb 02 09:25:47 crc kubenswrapper[4747]: I0202 09:25:47.883900 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" podStartSLOduration=2.439719646 podStartE2EDuration="2.883875952s" podCreationTimestamp="2026-02-02 09:25:45 +0000 UTC" firstStartedPulling="2026-02-02 09:25:46.923991661 +0000 UTC m=+1759.468330094" lastFinishedPulling="2026-02-02 09:25:47.368147967 +0000 UTC m=+1759.912486400" observedRunningTime="2026-02-02 09:25:47.875586072 +0000 UTC m=+1760.419924525" watchObservedRunningTime="2026-02-02 09:25:47.883875952 +0000 UTC m=+1760.428214395" Feb 02 09:25:56 crc kubenswrapper[4747]: I0202 09:25:56.340229 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:25:56 crc kubenswrapper[4747]: E0202 09:25:56.341461 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:26:09 crc kubenswrapper[4747]: I0202 09:26:09.339604 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:26:09 crc kubenswrapper[4747]: E0202 09:26:09.340362 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:26:23 crc kubenswrapper[4747]: I0202 09:26:23.339459 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:26:23 crc kubenswrapper[4747]: E0202 09:26:23.340419 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:26:30 crc kubenswrapper[4747]: I0202 09:26:30.228610 4747 generic.go:334] "Generic (PLEG): container finished" podID="0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" containerID="594adde51405d728ad3951b97b741e22ea40712e0dd190c12914d87c293d5d61" exitCode=0 Feb 02 09:26:30 crc kubenswrapper[4747]: I0202 09:26:30.228708 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" event={"ID":"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8","Type":"ContainerDied","Data":"594adde51405d728ad3951b97b741e22ea40712e0dd190c12914d87c293d5d61"} Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.660466 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.760847 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.761029 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zx298\" (UniqueName: \"kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.761080 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.761236 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.761398 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: 
\"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.761432 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\" (UID: \"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8\") " Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.766636 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298" (OuterVolumeSpecName: "kube-api-access-zx298") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "kube-api-access-zx298". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.767157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.789805 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.790587 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.790856 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.801122 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory" (OuterVolumeSpecName: "inventory") pod "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" (UID: "0238bbca-ba9a-4e80-bdc3-1fc0467c30c8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864192 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864239 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864254 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864267 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864282 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zx298\" (UniqueName: \"kubernetes.io/projected/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-kube-api-access-zx298\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:31 crc kubenswrapper[4747]: I0202 09:26:31.864295 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0238bbca-ba9a-4e80-bdc3-1fc0467c30c8-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.245177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" event={"ID":"0238bbca-ba9a-4e80-bdc3-1fc0467c30c8","Type":"ContainerDied","Data":"5222d22376024a1a0a762900ed8f07f88efbc656ed5212bac1863865ae159996"} Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.245536 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5222d22376024a1a0a762900ed8f07f88efbc656ed5212bac1863865ae159996" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.245377 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.382769 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc"] Feb 02 09:26:32 crc kubenswrapper[4747]: E0202 09:26:32.383288 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.383323 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.383763 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0238bbca-ba9a-4e80-bdc3-1fc0467c30c8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.384742 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.385252 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc"] Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.386869 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.386916 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.387353 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.387383 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.388813 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.485807 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.486035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.486344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") 
" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.486468 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24whf\" (UniqueName: \"kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.486766 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.589734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.589864 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.589978 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.590011 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.590053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24whf\" (UniqueName: \"kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.594740 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: 
\"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.594862 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.598728 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.600927 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.606027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24whf\" (UniqueName: \"kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:32 crc kubenswrapper[4747]: I0202 09:26:32.701441 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:26:33 crc kubenswrapper[4747]: I0202 09:26:33.200531 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc"] Feb 02 09:26:33 crc kubenswrapper[4747]: I0202 09:26:33.252820 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" event={"ID":"5e4244f7-511f-4193-b74d-6d018e944b45","Type":"ContainerStarted","Data":"0b1c975b43255846c98bc41e37205f40bd5c0bb1e3e72885b8d6c040745c28ff"} Feb 02 09:26:34 crc kubenswrapper[4747]: I0202 09:26:34.262038 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" event={"ID":"5e4244f7-511f-4193-b74d-6d018e944b45","Type":"ContainerStarted","Data":"1e895c27bbb57a33e7852685c85dc77f9254d81dd232f15fcbbba48a226a29fa"} Feb 02 09:26:34 crc kubenswrapper[4747]: I0202 09:26:34.285440 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" podStartSLOduration=1.734479476 podStartE2EDuration="2.285418971s" podCreationTimestamp="2026-02-02 09:26:32 +0000 UTC" firstStartedPulling="2026-02-02 09:26:33.205464306 +0000 UTC m=+1805.749802739" lastFinishedPulling="2026-02-02 09:26:33.756403801 +0000 UTC m=+1806.300742234" observedRunningTime="2026-02-02 09:26:34.274490045 +0000 UTC m=+1806.818828498" watchObservedRunningTime="2026-02-02 09:26:34.285418971 +0000 UTC m=+1806.829757404" Feb 02 09:26:37 crc kubenswrapper[4747]: I0202 09:26:37.339792 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:26:37 crc kubenswrapper[4747]: E0202 09:26:37.340442 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:26:49 crc kubenswrapper[4747]: I0202 09:26:49.339255 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:26:49 crc kubenswrapper[4747]: E0202 09:26:49.339968 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:27:01 crc kubenswrapper[4747]: I0202 09:27:01.340041 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:27:01 crc kubenswrapper[4747]: E0202 09:27:01.342214 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:27:13 crc kubenswrapper[4747]: I0202 09:27:13.340483 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:27:13 crc kubenswrapper[4747]: E0202 09:27:13.341222 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:27:27 crc kubenswrapper[4747]: I0202 09:27:27.339678 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:27:28 crc kubenswrapper[4747]: I0202 09:27:28.449109 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a"} Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.426485 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.429327 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.443024 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.486554 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hks7h\" (UniqueName: \"kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.486853 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.486948 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.588800 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hks7h\" (UniqueName: \"kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 
09:28:43.588952 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.588979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.589471 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.589764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.614476 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hks7h\" (UniqueName: \"kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h\") pod \"certified-operators-nknjl\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:43 crc kubenswrapper[4747]: I0202 09:28:43.754196 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:44 crc kubenswrapper[4747]: I0202 09:28:44.284859 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:45 crc kubenswrapper[4747]: I0202 09:28:45.142401 4747 generic.go:334] "Generic (PLEG): container finished" podID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerID="54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6" exitCode=0 Feb 02 09:28:45 crc kubenswrapper[4747]: I0202 09:28:45.142472 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerDied","Data":"54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6"} Feb 02 09:28:45 crc kubenswrapper[4747]: I0202 09:28:45.142735 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerStarted","Data":"f1f050d4cea97d8a7f362267fdf23b3dc518ad40fd2aa7abee1a8471e9be8d08"} Feb 02 09:28:45 crc kubenswrapper[4747]: I0202 09:28:45.145515 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:28:46 crc kubenswrapper[4747]: I0202 09:28:46.151895 4747 generic.go:334] "Generic (PLEG): container finished" podID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerID="d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e" exitCode=0 Feb 02 09:28:46 crc kubenswrapper[4747]: I0202 09:28:46.151961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerDied","Data":"d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e"} Feb 02 09:28:47 crc kubenswrapper[4747]: I0202 09:28:47.164862 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerStarted","Data":"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200"} Feb 02 09:28:47 crc kubenswrapper[4747]: I0202 09:28:47.185178 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nknjl" podStartSLOduration=2.7929319870000002 podStartE2EDuration="4.185160063s" podCreationTimestamp="2026-02-02 09:28:43 +0000 UTC" firstStartedPulling="2026-02-02 09:28:45.145150823 +0000 UTC m=+1937.689489276" lastFinishedPulling="2026-02-02 09:28:46.537378919 +0000 UTC m=+1939.081717352" observedRunningTime="2026-02-02 09:28:47.18341783 +0000 UTC m=+1939.727756283" watchObservedRunningTime="2026-02-02 09:28:47.185160063 +0000 UTC m=+1939.729498496" Feb 02 09:28:53 crc kubenswrapper[4747]: I0202 09:28:53.754723 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:53 crc kubenswrapper[4747]: I0202 09:28:53.756600 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:53 crc kubenswrapper[4747]: I0202 09:28:53.796819 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:54 crc kubenswrapper[4747]: I0202 09:28:54.264527 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:54 crc kubenswrapper[4747]: I0202 09:28:54.318970 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.240752 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nknjl" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="registry-server" containerID="cri-o://72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200" gracePeriod=2 Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.698132 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.832818 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities\") pod \"70688b70-fb9b-43f2-91c9-e3807f9e065c\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.833320 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content\") pod \"70688b70-fb9b-43f2-91c9-e3807f9e065c\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.833479 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hks7h\" (UniqueName: \"kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h\") pod \"70688b70-fb9b-43f2-91c9-e3807f9e065c\" (UID: \"70688b70-fb9b-43f2-91c9-e3807f9e065c\") " Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.837568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities" (OuterVolumeSpecName: "utilities") pod "70688b70-fb9b-43f2-91c9-e3807f9e065c" (UID: "70688b70-fb9b-43f2-91c9-e3807f9e065c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.840231 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h" (OuterVolumeSpecName: "kube-api-access-hks7h") pod "70688b70-fb9b-43f2-91c9-e3807f9e065c" (UID: "70688b70-fb9b-43f2-91c9-e3807f9e065c"). InnerVolumeSpecName "kube-api-access-hks7h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.937643 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hks7h\" (UniqueName: \"kubernetes.io/projected/70688b70-fb9b-43f2-91c9-e3807f9e065c-kube-api-access-hks7h\") on node \"crc\" DevicePath \"\"" Feb 02 09:28:56 crc kubenswrapper[4747]: I0202 09:28:56.937681 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.203878 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70688b70-fb9b-43f2-91c9-e3807f9e065c" (UID: "70688b70-fb9b-43f2-91c9-e3807f9e065c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.242516 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70688b70-fb9b-43f2-91c9-e3807f9e065c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.260700 4747 generic.go:334] "Generic (PLEG): container finished" podID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerID="72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200" exitCode=0 Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.260740 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerDied","Data":"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200"} Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.260798 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nknjl" event={"ID":"70688b70-fb9b-43f2-91c9-e3807f9e065c","Type":"ContainerDied","Data":"f1f050d4cea97d8a7f362267fdf23b3dc518ad40fd2aa7abee1a8471e9be8d08"} Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.260822 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nknjl" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.260824 4747 scope.go:117] "RemoveContainer" containerID="72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.295829 4747 scope.go:117] "RemoveContainer" containerID="d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.298489 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.312051 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nknjl"] Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.332515 4747 scope.go:117] "RemoveContainer" containerID="54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.364757 4747 scope.go:117] "RemoveContainer" containerID="72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200" Feb 02 09:28:57 crc kubenswrapper[4747]: E0202 09:28:57.366584 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200\": container with ID starting with 72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200 not found: ID does not exist" containerID="72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.366618 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200"} err="failed to get container status \"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200\": rpc error: code = NotFound desc = could not find container \"72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200\": container with ID starting with 72fc3e02972063989ee7f41503245b017361789a72d90358b8733dd25e627200 not found: ID does not exist" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.366639 4747 scope.go:117] "RemoveContainer" containerID="d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e" Feb 02 09:28:57 crc kubenswrapper[4747]: E0202 09:28:57.366988 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e\": container with ID starting with d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e not found: ID does not exist" containerID="d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.367012 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e"} err="failed to get container status \"d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e\": rpc error: code = NotFound desc = could not find container \"d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e\": container with ID starting with d569116877210862e7470f0637cf143b072223130572b81a5f5c53bac3726d9e not found: ID does not exist" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.367025 4747 scope.go:117] "RemoveContainer" 
containerID="54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6" Feb 02 09:28:57 crc kubenswrapper[4747]: E0202 09:28:57.367643 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6\": container with ID starting with 54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6 not found: ID does not exist" containerID="54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6" Feb 02 09:28:57 crc kubenswrapper[4747]: I0202 09:28:57.367666 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6"} err="failed to get container status \"54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6\": rpc error: code = NotFound desc = could not find container \"54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6\": container with ID starting with 54c80ae21ac1fa8b2303ae6bb4a00ca1b9231b3ed4f9e6750f9f4164af616fd6 not found: ID does not exist" Feb 02 09:28:58 crc kubenswrapper[4747]: I0202 09:28:58.355444 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" path="/var/lib/kubelet/pods/70688b70-fb9b-43f2-91c9-e3807f9e065c/volumes" Feb 02 09:29:50 crc kubenswrapper[4747]: I0202 09:29:50.518530 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:29:50 crc kubenswrapper[4747]: I0202 09:29:50.519100 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.162822 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl"] Feb 02 09:30:00 crc kubenswrapper[4747]: E0202 09:30:00.163971 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="extract-content" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.163987 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="extract-content" Feb 02 09:30:00 crc kubenswrapper[4747]: E0202 09:30:00.164014 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="registry-server" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.164022 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="registry-server" Feb 02 09:30:00 crc kubenswrapper[4747]: E0202 09:30:00.164048 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="extract-utilities" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.164055 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="extract-utilities" Feb 02 09:30:00 crc 
kubenswrapper[4747]: I0202 09:30:00.164249 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="70688b70-fb9b-43f2-91c9-e3807f9e065c" containerName="registry-server" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.164980 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.170152 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.170367 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.179796 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl"] Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.327952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.328018 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snqzk\" (UniqueName: \"kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.328066 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.431184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.431391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snqzk\" (UniqueName: \"kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.431550 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.432438 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.439896 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.453823 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snqzk\" (UniqueName: \"kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk\") pod \"collect-profiles-29500410-6bsnl\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.497315 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:00 crc kubenswrapper[4747]: I0202 09:30:00.942853 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl"] Feb 02 09:30:01 crc kubenswrapper[4747]: I0202 09:30:01.873022 4747 generic.go:334] "Generic (PLEG): container finished" podID="945b00e2-39de-46e3-86b4-55568f8c3c5b" containerID="d4529566d6ce89554927a0dba937a2f25e44390796919b66ee89c5eed978c771" exitCode=0 Feb 02 09:30:01 crc kubenswrapper[4747]: I0202 09:30:01.873067 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" event={"ID":"945b00e2-39de-46e3-86b4-55568f8c3c5b","Type":"ContainerDied","Data":"d4529566d6ce89554927a0dba937a2f25e44390796919b66ee89c5eed978c771"} Feb 02 09:30:01 crc kubenswrapper[4747]: I0202 09:30:01.873363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" event={"ID":"945b00e2-39de-46e3-86b4-55568f8c3c5b","Type":"ContainerStarted","Data":"1de4974277d2a8d09635047e12322e84264b652d84af517002c4762d73a69155"} Feb 02 09:30:01 crc kubenswrapper[4747]: I0202 09:30:01.874915 4747 generic.go:334] "Generic (PLEG): container finished" podID="5e4244f7-511f-4193-b74d-6d018e944b45" containerID="1e895c27bbb57a33e7852685c85dc77f9254d81dd232f15fcbbba48a226a29fa" exitCode=0 Feb 02 09:30:01 crc kubenswrapper[4747]: I0202 09:30:01.874986 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" event={"ID":"5e4244f7-511f-4193-b74d-6d018e944b45","Type":"ContainerDied","Data":"1e895c27bbb57a33e7852685c85dc77f9254d81dd232f15fcbbba48a226a29fa"} Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.332033 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.339552 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501281 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle\") pod \"5e4244f7-511f-4193-b74d-6d018e944b45\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snqzk\" (UniqueName: \"kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk\") pod \"945b00e2-39de-46e3-86b4-55568f8c3c5b\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume\") pod \"945b00e2-39de-46e3-86b4-55568f8c3c5b\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501429 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24whf\" (UniqueName: \"kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf\") pod \"5e4244f7-511f-4193-b74d-6d018e944b45\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501455 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam\") pod \"5e4244f7-511f-4193-b74d-6d018e944b45\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.501571 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume\") pod \"945b00e2-39de-46e3-86b4-55568f8c3c5b\" (UID: \"945b00e2-39de-46e3-86b4-55568f8c3c5b\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.502291 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory\") pod \"5e4244f7-511f-4193-b74d-6d018e944b45\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.503793 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0\") pod \"5e4244f7-511f-4193-b74d-6d018e944b45\" (UID: \"5e4244f7-511f-4193-b74d-6d018e944b45\") " Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.508519 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf" (OuterVolumeSpecName: "kube-api-access-24whf") pod "5e4244f7-511f-4193-b74d-6d018e944b45" (UID: 
"5e4244f7-511f-4193-b74d-6d018e944b45"). InnerVolumeSpecName "kube-api-access-24whf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.511427 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5e4244f7-511f-4193-b74d-6d018e944b45" (UID: "5e4244f7-511f-4193-b74d-6d018e944b45"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.511540 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume" (OuterVolumeSpecName: "config-volume") pod "945b00e2-39de-46e3-86b4-55568f8c3c5b" (UID: "945b00e2-39de-46e3-86b4-55568f8c3c5b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.511659 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "945b00e2-39de-46e3-86b4-55568f8c3c5b" (UID: "945b00e2-39de-46e3-86b4-55568f8c3c5b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.521421 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk" (OuterVolumeSpecName: "kube-api-access-snqzk") pod "945b00e2-39de-46e3-86b4-55568f8c3c5b" (UID: "945b00e2-39de-46e3-86b4-55568f8c3c5b"). InnerVolumeSpecName "kube-api-access-snqzk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.529587 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snqzk\" (UniqueName: \"kubernetes.io/projected/945b00e2-39de-46e3-86b4-55568f8c3c5b-kube-api-access-snqzk\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.529639 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/945b00e2-39de-46e3-86b4-55568f8c3c5b-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.529656 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24whf\" (UniqueName: \"kubernetes.io/projected/5e4244f7-511f-4193-b74d-6d018e944b45-kube-api-access-24whf\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.529669 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/945b00e2-39de-46e3-86b4-55568f8c3c5b-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.529695 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.537835 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5e4244f7-511f-4193-b74d-6d018e944b45" (UID: "5e4244f7-511f-4193-b74d-6d018e944b45"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.549917 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "5e4244f7-511f-4193-b74d-6d018e944b45" (UID: "5e4244f7-511f-4193-b74d-6d018e944b45"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.555437 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory" (OuterVolumeSpecName: "inventory") pod "5e4244f7-511f-4193-b74d-6d018e944b45" (UID: "5e4244f7-511f-4193-b74d-6d018e944b45"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.631720 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.631776 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.631792 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5e4244f7-511f-4193-b74d-6d018e944b45-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.898886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" event={"ID":"945b00e2-39de-46e3-86b4-55568f8c3c5b","Type":"ContainerDied","Data":"1de4974277d2a8d09635047e12322e84264b652d84af517002c4762d73a69155"} Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.899270 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1de4974277d2a8d09635047e12322e84264b652d84af517002c4762d73a69155" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.898973 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500410-6bsnl" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.901193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" event={"ID":"5e4244f7-511f-4193-b74d-6d018e944b45","Type":"ContainerDied","Data":"0b1c975b43255846c98bc41e37205f40bd5c0bb1e3e72885b8d6c040745c28ff"} Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.901239 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b1c975b43255846c98bc41e37205f40bd5c0bb1e3e72885b8d6c040745c28ff" Feb 02 09:30:03 crc kubenswrapper[4747]: I0202 09:30:03.901281 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.004332 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts"] Feb 02 09:30:04 crc kubenswrapper[4747]: E0202 09:30:04.004824 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="945b00e2-39de-46e3-86b4-55568f8c3c5b" containerName="collect-profiles" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.004847 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="945b00e2-39de-46e3-86b4-55568f8c3c5b" containerName="collect-profiles" Feb 02 09:30:04 crc kubenswrapper[4747]: E0202 09:30:04.004870 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4244f7-511f-4193-b74d-6d018e944b45" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.004879 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e4244f7-511f-4193-b74d-6d018e944b45" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.005170 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="945b00e2-39de-46e3-86b4-55568f8c3c5b" containerName="collect-profiles" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.005195 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e4244f7-511f-4193-b74d-6d018e944b45" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.006564 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009164 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009306 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009357 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009506 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009523 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009551 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.009706 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.020168 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts"] Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039690 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: 
\"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039743 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039792 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039868 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039910 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.039947 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4k5k\" (UniqueName: \"kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.040019 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.040051 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.040101 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.141542 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.141597 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.141639 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.141712 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.141783 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.142054 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4k5k\" (UniqueName: \"kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.142739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.142780 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.142837 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.143845 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.146429 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.146503 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.148558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.149904 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.149913 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.150477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.153043 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.162148 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4k5k\" (UniqueName: \"kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k\") pod \"nova-edpm-deployment-openstack-edpm-ipam-fpkts\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.340517 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.433612 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh"] Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.443318 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500365-j9tdh"] Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.682704 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts"] Feb 02 09:30:04 crc kubenswrapper[4747]: W0202 09:30:04.687250 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda072b9d8_fc03_4e5f_9470_458501cb4a01.slice/crio-86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428 WatchSource:0}: Error finding container 86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428: Status 404 returned error can't find the container with id 86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428 Feb 02 09:30:04 crc kubenswrapper[4747]: I0202 09:30:04.910438 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" event={"ID":"a072b9d8-fc03-4e5f-9470-458501cb4a01","Type":"ContainerStarted","Data":"86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428"} Feb 02 09:30:05 crc kubenswrapper[4747]: I0202 09:30:05.920071 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" event={"ID":"a072b9d8-fc03-4e5f-9470-458501cb4a01","Type":"ContainerStarted","Data":"574391c60ad020ac652c90aa7e0fa04ac792cd8641091083ee54406bd183096b"} Feb 02 09:30:05 crc kubenswrapper[4747]: I0202 09:30:05.941250 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" podStartSLOduration=2.44184606 podStartE2EDuration="2.941231846s" podCreationTimestamp="2026-02-02 09:30:03 +0000 UTC" firstStartedPulling="2026-02-02 09:30:04.690075949 +0000 UTC m=+2017.234414382" lastFinishedPulling="2026-02-02 09:30:05.189461695 +0000 UTC m=+2017.733800168" observedRunningTime="2026-02-02 09:30:05.936868026 +0000 UTC 
m=+2018.481206469" watchObservedRunningTime="2026-02-02 09:30:05.941231846 +0000 UTC m=+2018.485570269" Feb 02 09:30:06 crc kubenswrapper[4747]: I0202 09:30:06.357707 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91844f8-e11c-4a91-86ae-be01d3d901fe" path="/var/lib/kubelet/pods/d91844f8-e11c-4a91-86ae-be01d3d901fe/volumes" Feb 02 09:30:07 crc kubenswrapper[4747]: I0202 09:30:07.751356 4747 scope.go:117] "RemoveContainer" containerID="1f9d9574ff027eb138ac342b2e917c64efcc81cab83a0552a1a9e0f771e86b95" Feb 02 09:30:20 crc kubenswrapper[4747]: I0202 09:30:20.518746 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:30:20 crc kubenswrapper[4747]: I0202 09:30:20.519466 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:30:50 crc kubenswrapper[4747]: I0202 09:30:50.519161 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:30:50 crc kubenswrapper[4747]: I0202 09:30:50.519723 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:30:50 crc kubenswrapper[4747]: I0202 09:30:50.519770 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:30:50 crc kubenswrapper[4747]: I0202 09:30:50.520566 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:30:50 crc kubenswrapper[4747]: I0202 09:30:50.520626 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a" gracePeriod=600 Feb 02 09:30:51 crc kubenswrapper[4747]: I0202 09:30:51.341239 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a" exitCode=0 Feb 02 09:30:51 crc kubenswrapper[4747]: I0202 09:30:51.341300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" 
event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a"} Feb 02 09:30:51 crc kubenswrapper[4747]: I0202 09:30:51.341881 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36"} Feb 02 09:30:51 crc kubenswrapper[4747]: I0202 09:30:51.341909 4747 scope.go:117] "RemoveContainer" containerID="200563619d1cf6bf3739e9c84529f09a3244f0e165fd86afcdd704466159a9d5" Feb 02 09:32:03 crc kubenswrapper[4747]: I0202 09:32:03.132336 4747 generic.go:334] "Generic (PLEG): container finished" podID="a072b9d8-fc03-4e5f-9470-458501cb4a01" containerID="574391c60ad020ac652c90aa7e0fa04ac792cd8641091083ee54406bd183096b" exitCode=0 Feb 02 09:32:03 crc kubenswrapper[4747]: I0202 09:32:03.132400 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" event={"ID":"a072b9d8-fc03-4e5f-9470-458501cb4a01","Type":"ContainerDied","Data":"574391c60ad020ac652c90aa7e0fa04ac792cd8641091083ee54406bd183096b"} Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.550886 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.672050 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673015 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673401 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673466 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673499 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673533 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673562 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4k5k\" (UniqueName: \"kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.673641 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1\") pod \"a072b9d8-fc03-4e5f-9470-458501cb4a01\" (UID: \"a072b9d8-fc03-4e5f-9470-458501cb4a01\") " Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.681161 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k" (OuterVolumeSpecName: "kube-api-access-p4k5k") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "kube-api-access-p4k5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.682079 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.700863 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.705277 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.707473 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.708619 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.710630 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.711615 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory" (OuterVolumeSpecName: "inventory") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.711818 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a072b9d8-fc03-4e5f-9470-458501cb4a01" (UID: "a072b9d8-fc03-4e5f-9470-458501cb4a01"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775826 4747 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775856 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775867 4747 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775876 4747 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775903 4747 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775912 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775920 4747 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775927 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4k5k\" (UniqueName: \"kubernetes.io/projected/a072b9d8-fc03-4e5f-9470-458501cb4a01-kube-api-access-p4k5k\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:04 crc kubenswrapper[4747]: I0202 09:32:04.775978 4747 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a072b9d8-fc03-4e5f-9470-458501cb4a01-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.154038 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" event={"ID":"a072b9d8-fc03-4e5f-9470-458501cb4a01","Type":"ContainerDied","Data":"86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428"} Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.154353 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86b37c045eb3a38d3f6fd8a7d4cdbcf2b2d89cd10a22636f0935705f6652c428" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.154117 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-fpkts" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.253861 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6"] Feb 02 09:32:05 crc kubenswrapper[4747]: E0202 09:32:05.254318 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a072b9d8-fc03-4e5f-9470-458501cb4a01" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.254335 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a072b9d8-fc03-4e5f-9470-458501cb4a01" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.254607 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a072b9d8-fc03-4e5f-9470-458501cb4a01" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.255378 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.257379 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rg4cq" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.257861 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.257984 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.259068 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.262666 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.268221 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6"] Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287194 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287352 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287550 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl4zk\" (UniqueName: \"kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287643 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287763 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.287814 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389231 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389354 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389389 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl4zk\" (UniqueName: \"kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389442 4747 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389639 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389682 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.389717 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.393872 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.393971 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.393992 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.398161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.404780 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.407556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.409006 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl4zk\" (UniqueName: \"kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-cklb6\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:05 crc kubenswrapper[4747]: I0202 09:32:05.577595 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:32:06 crc kubenswrapper[4747]: I0202 09:32:06.092566 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6"] Feb 02 09:32:06 crc kubenswrapper[4747]: I0202 09:32:06.163700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" event={"ID":"efbfdb02-1bdd-471d-9054-a59de7b96f4c","Type":"ContainerStarted","Data":"4c070344ceb784515e85060feebac89ce3b610c55cfc69a9c104829dc127724d"} Feb 02 09:32:07 crc kubenswrapper[4747]: I0202 09:32:07.171510 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" event={"ID":"efbfdb02-1bdd-471d-9054-a59de7b96f4c","Type":"ContainerStarted","Data":"d253a8e77178dcec18fc5131f0dded245b9a35b36757ec33b755216397dc172a"} Feb 02 09:32:07 crc kubenswrapper[4747]: I0202 09:32:07.200061 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" podStartSLOduration=1.721185076 podStartE2EDuration="2.200033194s" podCreationTimestamp="2026-02-02 09:32:05 +0000 UTC" firstStartedPulling="2026-02-02 09:32:06.095681475 +0000 UTC m=+2138.640019908" lastFinishedPulling="2026-02-02 09:32:06.574529593 +0000 UTC m=+2139.118868026" observedRunningTime="2026-02-02 09:32:07.186253717 +0000 UTC m=+2139.730592170" watchObservedRunningTime="2026-02-02 09:32:07.200033194 +0000 UTC m=+2139.744371677" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.463752 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.468633 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.478833 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.643820 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.644472 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.644534 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8wp6\" (UniqueName: \"kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.746500 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8wp6\" (UniqueName: \"kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.746605 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.746701 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.747535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.747913 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.777889 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-b8wp6\" (UniqueName: \"kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6\") pod \"redhat-marketplace-hl547\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:31 crc kubenswrapper[4747]: I0202 09:32:31.793994 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:32 crc kubenswrapper[4747]: I0202 09:32:32.282455 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:32 crc kubenswrapper[4747]: W0202 09:32:32.285066 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f508a9a_85fe_4b26_b85f_9b01d173b4ef.slice/crio-eb20a4ba9f1e3fbf2b22e3b6a112eedbe9a118eaf224549bed5e8e11510e555e WatchSource:0}: Error finding container eb20a4ba9f1e3fbf2b22e3b6a112eedbe9a118eaf224549bed5e8e11510e555e: Status 404 returned error can't find the container with id eb20a4ba9f1e3fbf2b22e3b6a112eedbe9a118eaf224549bed5e8e11510e555e Feb 02 09:32:32 crc kubenswrapper[4747]: I0202 09:32:32.432148 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerStarted","Data":"eb20a4ba9f1e3fbf2b22e3b6a112eedbe9a118eaf224549bed5e8e11510e555e"} Feb 02 09:32:33 crc kubenswrapper[4747]: I0202 09:32:33.444582 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerID="330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c" exitCode=0 Feb 02 09:32:33 crc kubenswrapper[4747]: I0202 09:32:33.444650 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerDied","Data":"330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c"} Feb 02 09:32:34 crc kubenswrapper[4747]: I0202 09:32:34.454514 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerID="8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5" exitCode=0 Feb 02 09:32:34 crc kubenswrapper[4747]: I0202 09:32:34.454562 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerDied","Data":"8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5"} Feb 02 09:32:35 crc kubenswrapper[4747]: I0202 09:32:35.465524 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerStarted","Data":"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16"} Feb 02 09:32:35 crc kubenswrapper[4747]: I0202 09:32:35.489483 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hl547" podStartSLOduration=3.062794888 podStartE2EDuration="4.489461422s" podCreationTimestamp="2026-02-02 09:32:31 +0000 UTC" firstStartedPulling="2026-02-02 09:32:33.448044297 +0000 UTC m=+2165.992382730" lastFinishedPulling="2026-02-02 09:32:34.874710831 +0000 UTC m=+2167.419049264" observedRunningTime="2026-02-02 09:32:35.48424979 +0000 UTC m=+2168.028588243" 
watchObservedRunningTime="2026-02-02 09:32:35.489461422 +0000 UTC m=+2168.033799855" Feb 02 09:32:41 crc kubenswrapper[4747]: I0202 09:32:41.794850 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:41 crc kubenswrapper[4747]: I0202 09:32:41.795208 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:41 crc kubenswrapper[4747]: I0202 09:32:41.859853 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:42 crc kubenswrapper[4747]: I0202 09:32:42.567343 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:42 crc kubenswrapper[4747]: I0202 09:32:42.615592 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:44 crc kubenswrapper[4747]: I0202 09:32:44.536762 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hl547" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="registry-server" containerID="cri-o://aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16" gracePeriod=2 Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.009519 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.038049 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content\") pod \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.038154 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities\") pod \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.038305 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8wp6\" (UniqueName: \"kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6\") pod \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\" (UID: \"7f508a9a-85fe-4b26-b85f-9b01d173b4ef\") " Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.038948 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities" (OuterVolumeSpecName: "utilities") pod "7f508a9a-85fe-4b26-b85f-9b01d173b4ef" (UID: "7f508a9a-85fe-4b26-b85f-9b01d173b4ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.045133 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6" (OuterVolumeSpecName: "kube-api-access-b8wp6") pod "7f508a9a-85fe-4b26-b85f-9b01d173b4ef" (UID: "7f508a9a-85fe-4b26-b85f-9b01d173b4ef"). InnerVolumeSpecName "kube-api-access-b8wp6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.066154 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f508a9a-85fe-4b26-b85f-9b01d173b4ef" (UID: "7f508a9a-85fe-4b26-b85f-9b01d173b4ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.141428 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8wp6\" (UniqueName: \"kubernetes.io/projected/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-kube-api-access-b8wp6\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.141469 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.141480 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f508a9a-85fe-4b26-b85f-9b01d173b4ef-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.546174 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerID="aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16" exitCode=0 Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.546218 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerDied","Data":"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16"} Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.546230 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hl547" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.546247 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hl547" event={"ID":"7f508a9a-85fe-4b26-b85f-9b01d173b4ef","Type":"ContainerDied","Data":"eb20a4ba9f1e3fbf2b22e3b6a112eedbe9a118eaf224549bed5e8e11510e555e"} Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.546264 4747 scope.go:117] "RemoveContainer" containerID="aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.575579 4747 scope.go:117] "RemoveContainer" containerID="8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.583128 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.591751 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hl547"] Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.603697 4747 scope.go:117] "RemoveContainer" containerID="330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.644315 4747 scope.go:117] "RemoveContainer" containerID="aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16" Feb 02 09:32:45 crc kubenswrapper[4747]: E0202 09:32:45.644893 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16\": container with ID starting with aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16 not found: ID does not exist" containerID="aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.644958 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16"} err="failed to get container status \"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16\": rpc error: code = NotFound desc = could not find container \"aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16\": container with ID starting with aa96edbfc85043cb7fa9600854454fafca0ffd48effaa509a7a4cf6803154f16 not found: ID does not exist" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.644986 4747 scope.go:117] "RemoveContainer" containerID="8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5" Feb 02 09:32:45 crc kubenswrapper[4747]: E0202 09:32:45.645405 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5\": container with ID starting with 8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5 not found: ID does not exist" containerID="8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.645441 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5"} err="failed to get container status \"8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5\": rpc error: code = NotFound desc = could not find 
container \"8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5\": container with ID starting with 8ee6755032caec0490768a7e2be52d960a6b11fbe8d1cfc29f6ef7dd08fba4a5 not found: ID does not exist" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.645463 4747 scope.go:117] "RemoveContainer" containerID="330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c" Feb 02 09:32:45 crc kubenswrapper[4747]: E0202 09:32:45.645774 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c\": container with ID starting with 330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c not found: ID does not exist" containerID="330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c" Feb 02 09:32:45 crc kubenswrapper[4747]: I0202 09:32:45.645805 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c"} err="failed to get container status \"330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c\": rpc error: code = NotFound desc = could not find container \"330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c\": container with ID starting with 330dd6f0a1664d1524db255aa5626bd862d4bbd33fb69e010b7a5a9359d2904c not found: ID does not exist" Feb 02 09:32:46 crc kubenswrapper[4747]: I0202 09:32:46.350523 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" path="/var/lib/kubelet/pods/7f508a9a-85fe-4b26-b85f-9b01d173b4ef/volumes" Feb 02 09:32:50 crc kubenswrapper[4747]: I0202 09:32:50.518658 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:32:50 crc kubenswrapper[4747]: I0202 09:32:50.519236 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.512499 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:32:55 crc kubenswrapper[4747]: E0202 09:32:55.513560 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="extract-content" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.513578 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="extract-content" Feb 02 09:32:55 crc kubenswrapper[4747]: E0202 09:32:55.513595 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="extract-utilities" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.513603 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="extract-utilities" Feb 02 09:32:55 crc kubenswrapper[4747]: E0202 09:32:55.513613 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="registry-server" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.513622 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="registry-server" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.513861 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f508a9a-85fe-4b26-b85f-9b01d173b4ef" containerName="registry-server" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.515507 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.521823 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.654167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggddl\" (UniqueName: \"kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.654320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.654403 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.756873 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggddl\" (UniqueName: \"kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.756950 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.756982 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.757503 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") 
" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.758102 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.777517 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggddl\" (UniqueName: \"kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl\") pod \"redhat-operators-sjwnt\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:55 crc kubenswrapper[4747]: I0202 09:32:55.833059 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:32:56 crc kubenswrapper[4747]: I0202 09:32:56.295117 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:32:56 crc kubenswrapper[4747]: I0202 09:32:56.645753 4747 generic.go:334] "Generic (PLEG): container finished" podID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerID="bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353" exitCode=0 Feb 02 09:32:56 crc kubenswrapper[4747]: I0202 09:32:56.645817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerDied","Data":"bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353"} Feb 02 09:32:56 crc kubenswrapper[4747]: I0202 09:32:56.645882 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerStarted","Data":"60019fbff2ea1f72c8d9204a340c5d80f1ce228298efc2e04c5a1847f48fe8e8"} Feb 02 09:32:58 crc kubenswrapper[4747]: I0202 09:32:58.662225 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerStarted","Data":"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb"} Feb 02 09:32:59 crc kubenswrapper[4747]: I0202 09:32:59.671326 4747 generic.go:334] "Generic (PLEG): container finished" podID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerID="89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb" exitCode=0 Feb 02 09:32:59 crc kubenswrapper[4747]: I0202 09:32:59.671417 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerDied","Data":"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb"} Feb 02 09:33:01 crc kubenswrapper[4747]: I0202 09:33:01.688220 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerStarted","Data":"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2"} Feb 02 09:33:01 crc kubenswrapper[4747]: I0202 09:33:01.709800 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sjwnt" podStartSLOduration=3.227472598 
podStartE2EDuration="6.709781058s" podCreationTimestamp="2026-02-02 09:32:55 +0000 UTC" firstStartedPulling="2026-02-02 09:32:56.648349263 +0000 UTC m=+2189.192687696" lastFinishedPulling="2026-02-02 09:33:00.130657713 +0000 UTC m=+2192.674996156" observedRunningTime="2026-02-02 09:33:01.702531285 +0000 UTC m=+2194.246869728" watchObservedRunningTime="2026-02-02 09:33:01.709781058 +0000 UTC m=+2194.254119491" Feb 02 09:33:05 crc kubenswrapper[4747]: I0202 09:33:05.833779 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:05 crc kubenswrapper[4747]: I0202 09:33:05.834301 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.259376 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.262566 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.291206 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.464420 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j6vj\" (UniqueName: \"kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.464572 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.464638 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.566555 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j6vj\" (UniqueName: \"kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.566640 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.566676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.567325 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.567417 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.585846 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j6vj\" (UniqueName: \"kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj\") pod \"community-operators-r8lmj\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.588343 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:06 crc kubenswrapper[4747]: I0202 09:33:06.878280 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sjwnt" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="registry-server" probeResult="failure" output=< Feb 02 09:33:06 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Feb 02 09:33:06 crc kubenswrapper[4747]: > Feb 02 09:33:07 crc kubenswrapper[4747]: I0202 09:33:07.036588 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:07 crc kubenswrapper[4747]: I0202 09:33:07.747184 4747 generic.go:334] "Generic (PLEG): container finished" podID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerID="99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa" exitCode=0 Feb 02 09:33:07 crc kubenswrapper[4747]: I0202 09:33:07.747309 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerDied","Data":"99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa"} Feb 02 09:33:07 crc kubenswrapper[4747]: I0202 09:33:07.747538 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerStarted","Data":"5fb28d06e14666466f3af5ac35b0dbad7c3ee81fb525878d3030933479af1303"} Feb 02 09:33:08 crc kubenswrapper[4747]: I0202 09:33:08.756918 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerStarted","Data":"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8"} Feb 02 09:33:09 crc kubenswrapper[4747]: I0202 09:33:09.783263 4747 generic.go:334] "Generic (PLEG): container finished" podID="9cceeaf2-9918-49d3-9a63-1ff00239d199" 
containerID="f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8" exitCode=0 Feb 02 09:33:09 crc kubenswrapper[4747]: I0202 09:33:09.783975 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerDied","Data":"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8"} Feb 02 09:33:10 crc kubenswrapper[4747]: I0202 09:33:10.801736 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerStarted","Data":"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6"} Feb 02 09:33:10 crc kubenswrapper[4747]: I0202 09:33:10.832357 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r8lmj" podStartSLOduration=2.397735888 podStartE2EDuration="4.832329045s" podCreationTimestamp="2026-02-02 09:33:06 +0000 UTC" firstStartedPulling="2026-02-02 09:33:07.749184108 +0000 UTC m=+2200.293522541" lastFinishedPulling="2026-02-02 09:33:10.183777255 +0000 UTC m=+2202.728115698" observedRunningTime="2026-02-02 09:33:10.823149364 +0000 UTC m=+2203.367487787" watchObservedRunningTime="2026-02-02 09:33:10.832329045 +0000 UTC m=+2203.376667478" Feb 02 09:33:15 crc kubenswrapper[4747]: I0202 09:33:15.886291 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:15 crc kubenswrapper[4747]: I0202 09:33:15.944305 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:16 crc kubenswrapper[4747]: I0202 09:33:16.218709 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:33:16 crc kubenswrapper[4747]: I0202 09:33:16.593341 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:16 crc kubenswrapper[4747]: I0202 09:33:16.593400 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:16 crc kubenswrapper[4747]: I0202 09:33:16.661396 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:16 crc kubenswrapper[4747]: I0202 09:33:16.896813 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:17 crc kubenswrapper[4747]: I0202 09:33:17.866898 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sjwnt" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="registry-server" containerID="cri-o://b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2" gracePeriod=2 Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.281895 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.394716 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggddl\" (UniqueName: \"kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl\") pod \"fd553b1c-3fa4-4698-bb66-c555a402df82\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.394906 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content\") pod \"fd553b1c-3fa4-4698-bb66-c555a402df82\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.395015 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities\") pod \"fd553b1c-3fa4-4698-bb66-c555a402df82\" (UID: \"fd553b1c-3fa4-4698-bb66-c555a402df82\") " Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.396185 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities" (OuterVolumeSpecName: "utilities") pod "fd553b1c-3fa4-4698-bb66-c555a402df82" (UID: "fd553b1c-3fa4-4698-bb66-c555a402df82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.399747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl" (OuterVolumeSpecName: "kube-api-access-ggddl") pod "fd553b1c-3fa4-4698-bb66-c555a402df82" (UID: "fd553b1c-3fa4-4698-bb66-c555a402df82"). InnerVolumeSpecName "kube-api-access-ggddl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.497813 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggddl\" (UniqueName: \"kubernetes.io/projected/fd553b1c-3fa4-4698-bb66-c555a402df82-kube-api-access-ggddl\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.497855 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.511335 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd553b1c-3fa4-4698-bb66-c555a402df82" (UID: "fd553b1c-3fa4-4698-bb66-c555a402df82"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.600175 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd553b1c-3fa4-4698-bb66-c555a402df82-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.879497 4747 generic.go:334] "Generic (PLEG): container finished" podID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerID="b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2" exitCode=0 Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.879564 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerDied","Data":"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2"} Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.879607 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjwnt" event={"ID":"fd553b1c-3fa4-4698-bb66-c555a402df82","Type":"ContainerDied","Data":"60019fbff2ea1f72c8d9204a340c5d80f1ce228298efc2e04c5a1847f48fe8e8"} Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.879638 4747 scope.go:117] "RemoveContainer" containerID="b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.879855 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjwnt" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.914420 4747 scope.go:117] "RemoveContainer" containerID="89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.929086 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.946909 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sjwnt"] Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.954533 4747 scope.go:117] "RemoveContainer" containerID="bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.987295 4747 scope.go:117] "RemoveContainer" containerID="b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2" Feb 02 09:33:18 crc kubenswrapper[4747]: E0202 09:33:18.987735 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2\": container with ID starting with b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2 not found: ID does not exist" containerID="b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.987785 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2"} err="failed to get container status \"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2\": rpc error: code = NotFound desc = could not find container \"b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2\": container with ID starting with b88d1184890c9ad6981330c33d4af4f8386f4cde439fcc46e0d644b62600d5a2 not found: ID does not exist" Feb 02 09:33:18 crc 
kubenswrapper[4747]: I0202 09:33:18.987812 4747 scope.go:117] "RemoveContainer" containerID="89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb" Feb 02 09:33:18 crc kubenswrapper[4747]: E0202 09:33:18.988190 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb\": container with ID starting with 89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb not found: ID does not exist" containerID="89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.988250 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb"} err="failed to get container status \"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb\": rpc error: code = NotFound desc = could not find container \"89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb\": container with ID starting with 89336fda65674dedd9ee33ce302d30e8ba632f1748c12147e4f75cbddb9cd8bb not found: ID does not exist" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.988281 4747 scope.go:117] "RemoveContainer" containerID="bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353" Feb 02 09:33:18 crc kubenswrapper[4747]: E0202 09:33:18.988586 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353\": container with ID starting with bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353 not found: ID does not exist" containerID="bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353" Feb 02 09:33:18 crc kubenswrapper[4747]: I0202 09:33:18.988636 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353"} err="failed to get container status \"bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353\": rpc error: code = NotFound desc = could not find container \"bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353\": container with ID starting with bd336d69cf1d4a8c62492f59b39111c88d904681f68b77144833df96fedfc353 not found: ID does not exist" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.023558 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.023787 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r8lmj" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="registry-server" containerID="cri-o://9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6" gracePeriod=2 Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.448907 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.620281 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content\") pod \"9cceeaf2-9918-49d3-9a63-1ff00239d199\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.620483 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities\") pod \"9cceeaf2-9918-49d3-9a63-1ff00239d199\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.620535 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4j6vj\" (UniqueName: \"kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj\") pod \"9cceeaf2-9918-49d3-9a63-1ff00239d199\" (UID: \"9cceeaf2-9918-49d3-9a63-1ff00239d199\") " Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.621127 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities" (OuterVolumeSpecName: "utilities") pod "9cceeaf2-9918-49d3-9a63-1ff00239d199" (UID: "9cceeaf2-9918-49d3-9a63-1ff00239d199"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.632092 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj" (OuterVolumeSpecName: "kube-api-access-4j6vj") pod "9cceeaf2-9918-49d3-9a63-1ff00239d199" (UID: "9cceeaf2-9918-49d3-9a63-1ff00239d199"). InnerVolumeSpecName "kube-api-access-4j6vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.687277 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cceeaf2-9918-49d3-9a63-1ff00239d199" (UID: "9cceeaf2-9918-49d3-9a63-1ff00239d199"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.722310 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.722347 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j6vj\" (UniqueName: \"kubernetes.io/projected/9cceeaf2-9918-49d3-9a63-1ff00239d199-kube-api-access-4j6vj\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.722362 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cceeaf2-9918-49d3-9a63-1ff00239d199-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.892753 4747 generic.go:334] "Generic (PLEG): container finished" podID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerID="9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6" exitCode=0 Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.892804 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerDied","Data":"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6"} Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.892829 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r8lmj" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.893195 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r8lmj" event={"ID":"9cceeaf2-9918-49d3-9a63-1ff00239d199","Type":"ContainerDied","Data":"5fb28d06e14666466f3af5ac35b0dbad7c3ee81fb525878d3030933479af1303"} Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.893222 4747 scope.go:117] "RemoveContainer" containerID="9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.914778 4747 scope.go:117] "RemoveContainer" containerID="f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.930053 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.938404 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r8lmj"] Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.953728 4747 scope.go:117] "RemoveContainer" containerID="99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.973810 4747 scope.go:117] "RemoveContainer" containerID="9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6" Feb 02 09:33:19 crc kubenswrapper[4747]: E0202 09:33:19.974342 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6\": container with ID starting with 9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6 not found: ID does not exist" containerID="9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.974397 
4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6"} err="failed to get container status \"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6\": rpc error: code = NotFound desc = could not find container \"9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6\": container with ID starting with 9a7105b665d251b185782541be5572894a7f02f98cbf06f664fc5b76782256c6 not found: ID does not exist" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.974429 4747 scope.go:117] "RemoveContainer" containerID="f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8" Feb 02 09:33:19 crc kubenswrapper[4747]: E0202 09:33:19.974859 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8\": container with ID starting with f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8 not found: ID does not exist" containerID="f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.974887 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8"} err="failed to get container status \"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8\": rpc error: code = NotFound desc = could not find container \"f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8\": container with ID starting with f6224dfae1b727b92fed992ea4574daea215caa387cd872669c59946f5c4d5f8 not found: ID does not exist" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.974904 4747 scope.go:117] "RemoveContainer" containerID="99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa" Feb 02 09:33:19 crc kubenswrapper[4747]: E0202 09:33:19.975282 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa\": container with ID starting with 99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa not found: ID does not exist" containerID="99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa" Feb 02 09:33:19 crc kubenswrapper[4747]: I0202 09:33:19.975326 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa"} err="failed to get container status \"99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa\": rpc error: code = NotFound desc = could not find container \"99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa\": container with ID starting with 99d3345fc92b37f48285e7d0eeab61e6783248081223c0007839c2b630f20afa not found: ID does not exist" Feb 02 09:33:20 crc kubenswrapper[4747]: I0202 09:33:20.348881 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" path="/var/lib/kubelet/pods/9cceeaf2-9918-49d3-9a63-1ff00239d199/volumes" Feb 02 09:33:20 crc kubenswrapper[4747]: I0202 09:33:20.349705 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" path="/var/lib/kubelet/pods/fd553b1c-3fa4-4698-bb66-c555a402df82/volumes" Feb 02 09:33:20 crc kubenswrapper[4747]: I0202 
09:33:20.518399 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:33:20 crc kubenswrapper[4747]: I0202 09:33:20.518480 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:33:50 crc kubenswrapper[4747]: I0202 09:33:50.518846 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:33:50 crc kubenswrapper[4747]: I0202 09:33:50.519789 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:33:50 crc kubenswrapper[4747]: I0202 09:33:50.519875 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:33:50 crc kubenswrapper[4747]: I0202 09:33:50.521081 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:33:50 crc kubenswrapper[4747]: I0202 09:33:50.521151 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" gracePeriod=600 Feb 02 09:33:50 crc kubenswrapper[4747]: E0202 09:33:50.639730 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:33:51 crc kubenswrapper[4747]: I0202 09:33:51.220501 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" exitCode=0 Feb 02 09:33:51 crc kubenswrapper[4747]: I0202 09:33:51.221111 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" 
event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36"} Feb 02 09:33:51 crc kubenswrapper[4747]: I0202 09:33:51.221186 4747 scope.go:117] "RemoveContainer" containerID="16ba4f39ada504b2f3ab4c4c4de8b833a6bb57718aa5bd360987ebfaec09a47a" Feb 02 09:33:51 crc kubenswrapper[4747]: I0202 09:33:51.222299 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:33:51 crc kubenswrapper[4747]: E0202 09:33:51.222723 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:34:01 crc kubenswrapper[4747]: E0202 09:34:01.774840 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Feb 02 09:34:04 crc kubenswrapper[4747]: I0202 09:34:04.340035 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:34:04 crc kubenswrapper[4747]: E0202 09:34:04.340821 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:34:13 crc kubenswrapper[4747]: I0202 09:34:13.444565 4747 generic.go:334] "Generic (PLEG): container finished" podID="efbfdb02-1bdd-471d-9054-a59de7b96f4c" containerID="d253a8e77178dcec18fc5131f0dded245b9a35b36757ec33b755216397dc172a" exitCode=0 Feb 02 09:34:13 crc kubenswrapper[4747]: I0202 09:34:13.444671 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" event={"ID":"efbfdb02-1bdd-471d-9054-a59de7b96f4c","Type":"ContainerDied","Data":"d253a8e77178dcec18fc5131f0dded245b9a35b36757ec33b755216397dc172a"} Feb 02 09:34:14 crc kubenswrapper[4747]: I0202 09:34:14.873661 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.003658 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004091 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004208 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl4zk\" (UniqueName: \"kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004538 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004693 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.004906 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1\") pod \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\" (UID: \"efbfdb02-1bdd-471d-9054-a59de7b96f4c\") " Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.012087 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.013081 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk" (OuterVolumeSpecName: "kube-api-access-pl4zk") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "kube-api-access-pl4zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.038781 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.039752 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory" (OuterVolumeSpecName: "inventory") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.055179 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.067169 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.069645 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "efbfdb02-1bdd-471d-9054-a59de7b96f4c" (UID: "efbfdb02-1bdd-471d-9054-a59de7b96f4c"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106715 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106750 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106768 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106782 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106795 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl4zk\" (UniqueName: \"kubernetes.io/projected/efbfdb02-1bdd-471d-9054-a59de7b96f4c-kube-api-access-pl4zk\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106809 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.106820 4747 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efbfdb02-1bdd-471d-9054-a59de7b96f4c-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.339574 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:34:15 crc kubenswrapper[4747]: E0202 09:34:15.340699 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.462639 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" event={"ID":"efbfdb02-1bdd-471d-9054-a59de7b96f4c","Type":"ContainerDied","Data":"4c070344ceb784515e85060feebac89ce3b610c55cfc69a9c104829dc127724d"} Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.462677 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c070344ceb784515e85060feebac89ce3b610c55cfc69a9c104829dc127724d" Feb 02 09:34:15 crc kubenswrapper[4747]: I0202 09:34:15.462707 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-cklb6" Feb 02 09:34:26 crc kubenswrapper[4747]: I0202 09:34:26.339748 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:34:26 crc kubenswrapper[4747]: E0202 09:34:26.340502 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:34:41 crc kubenswrapper[4747]: I0202 09:34:41.340036 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:34:41 crc kubenswrapper[4747]: E0202 09:34:41.340752 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:34:54 crc kubenswrapper[4747]: I0202 09:34:54.340229 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:34:54 crc kubenswrapper[4747]: E0202 09:34:54.340988 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:08 crc kubenswrapper[4747]: I0202 09:35:08.345326 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:35:08 crc kubenswrapper[4747]: E0202 09:35:08.346922 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.041723 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042720 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="extract-utilities" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042740 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="extract-utilities" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042760 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="extract-content" Feb 02 
09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042769 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="extract-content" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042779 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="extract-utilities" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042788 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="extract-utilities" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042799 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042808 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042833 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042840 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042857 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efbfdb02-1bdd-471d-9054-a59de7b96f4c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042866 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="efbfdb02-1bdd-471d-9054-a59de7b96f4c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 09:35:12 crc kubenswrapper[4747]: E0202 09:35:12.042889 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="extract-content" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.042897 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="extract-content" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.043112 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="efbfdb02-1bdd-471d-9054-a59de7b96f4c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.043131 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cceeaf2-9918-49d3-9a63-1ff00239d199" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.043158 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd553b1c-3fa4-4698-bb66-c555a402df82" containerName="registry-server" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.044017 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.046608 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.046762 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mp7z4" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.046772 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.047616 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.066006 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206332 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206475 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206514 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qmcm\" (UniqueName: \"kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206670 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206838 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206871 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206919 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.206999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.308966 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309023 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qmcm\" (UniqueName: \"kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309152 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309191 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309217 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309266 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.309284 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.310228 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.310313 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.310249 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.311392 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.311686 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.317346 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.317966 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc 
kubenswrapper[4747]: I0202 09:35:12.318090 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.335492 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qmcm\" (UniqueName: \"kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.361062 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.373256 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.842179 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 02 09:35:12 crc kubenswrapper[4747]: W0202 09:35:12.847226 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84239355_717c_437f_abf5_b5df1b3a0806.slice/crio-a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb WatchSource:0}: Error finding container a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb: Status 404 returned error can't find the container with id a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb Feb 02 09:35:12 crc kubenswrapper[4747]: I0202 09:35:12.854247 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:35:13 crc kubenswrapper[4747]: I0202 09:35:13.040336 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"84239355-717c-437f-abf5-b5df1b3a0806","Type":"ContainerStarted","Data":"a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb"} Feb 02 09:35:23 crc kubenswrapper[4747]: I0202 09:35:23.339926 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:35:23 crc kubenswrapper[4747]: E0202 09:35:23.341706 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:35 crc kubenswrapper[4747]: I0202 09:35:35.340122 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:35:35 crc kubenswrapper[4747]: E0202 09:35:35.340605 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:43 crc kubenswrapper[4747]: E0202 09:35:43.265201 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Feb 02 09:35:43 crc kubenswrapper[4747]: E0202 09:35:43.266607 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4qmcm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]Volu
meDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(84239355-717c-437f-abf5-b5df1b3a0806): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 09:35:43 crc kubenswrapper[4747]: E0202 09:35:43.269821 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="84239355-717c-437f-abf5-b5df1b3a0806" Feb 02 09:35:43 crc kubenswrapper[4747]: E0202 09:35:43.323579 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="84239355-717c-437f-abf5-b5df1b3a0806" Feb 02 09:35:46 crc kubenswrapper[4747]: I0202 09:35:46.339354 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:35:46 crc kubenswrapper[4747]: E0202 09:35:46.339998 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:56 crc kubenswrapper[4747]: I0202 09:35:56.763471 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 02 09:35:58 crc kubenswrapper[4747]: I0202 09:35:58.346014 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:35:58 crc kubenswrapper[4747]: E0202 09:35:58.347653 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:35:58 crc kubenswrapper[4747]: I0202 09:35:58.447803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"84239355-717c-437f-abf5-b5df1b3a0806","Type":"ContainerStarted","Data":"db3d3dc2ea52f421407ec8a3ac7e8d5db634e56eddbb8fb17d8b0f441d0cef3c"} Feb 02 09:35:58 crc kubenswrapper[4747]: I0202 09:35:58.464579 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.557388602 podStartE2EDuration="47.464564509s" podCreationTimestamp="2026-02-02 09:35:11 +0000 UTC" firstStartedPulling="2026-02-02 09:35:12.854035552 +0000 UTC m=+2325.398373985" lastFinishedPulling="2026-02-02 09:35:56.761211459 +0000 UTC m=+2369.305549892" observedRunningTime="2026-02-02 09:35:58.463440981 +0000 UTC m=+2371.007779414" watchObservedRunningTime="2026-02-02 09:35:58.464564509 +0000 UTC m=+2371.008902942" Feb 02 09:36:11 crc 
kubenswrapper[4747]: I0202 09:36:11.340036 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:36:11 crc kubenswrapper[4747]: E0202 09:36:11.341005 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:36:25 crc kubenswrapper[4747]: I0202 09:36:25.339795 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:36:25 crc kubenswrapper[4747]: E0202 09:36:25.340986 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:36:38 crc kubenswrapper[4747]: I0202 09:36:38.347586 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:36:38 crc kubenswrapper[4747]: E0202 09:36:38.348402 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:36:50 crc kubenswrapper[4747]: I0202 09:36:50.339531 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:36:50 crc kubenswrapper[4747]: E0202 09:36:50.340602 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:37:01 crc kubenswrapper[4747]: I0202 09:37:01.339358 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:37:01 crc kubenswrapper[4747]: E0202 09:37:01.340167 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:37:15 crc kubenswrapper[4747]: I0202 09:37:15.340625 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:37:15 crc 
kubenswrapper[4747]: E0202 09:37:15.341518 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:37:26 crc kubenswrapper[4747]: I0202 09:37:26.339927 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:37:26 crc kubenswrapper[4747]: E0202 09:37:26.340839 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:37:40 crc kubenswrapper[4747]: I0202 09:37:40.341833 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:37:40 crc kubenswrapper[4747]: E0202 09:37:40.343392 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:37:54 crc kubenswrapper[4747]: I0202 09:37:54.340083 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:37:54 crc kubenswrapper[4747]: E0202 09:37:54.341321 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:38:07 crc kubenswrapper[4747]: I0202 09:38:07.339675 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:38:07 crc kubenswrapper[4747]: E0202 09:38:07.341095 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:38:20 crc kubenswrapper[4747]: I0202 09:38:20.342543 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:38:20 crc kubenswrapper[4747]: E0202 09:38:20.343426 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:38:32 crc kubenswrapper[4747]: I0202 09:38:32.339559 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:38:32 crc kubenswrapper[4747]: E0202 09:38:32.340334 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:38:47 crc kubenswrapper[4747]: I0202 09:38:47.339810 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:38:47 crc kubenswrapper[4747]: E0202 09:38:47.341422 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:39:02 crc kubenswrapper[4747]: I0202 09:39:02.340033 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:39:03 crc kubenswrapper[4747]: I0202 09:39:03.094843 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90"} Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.913037 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.922516 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.974456 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmgrg\" (UniqueName: \"kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.975162 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.975238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:29 crc kubenswrapper[4747]: I0202 09:39:29.977369 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.077451 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmgrg\" (UniqueName: \"kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.077622 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.077655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.078200 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.078244 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.119542 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xmgrg\" (UniqueName: \"kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg\") pod \"certified-operators-gj82l\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.300648 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:30 crc kubenswrapper[4747]: I0202 09:39:30.813427 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:31 crc kubenswrapper[4747]: I0202 09:39:31.327891 4747 generic.go:334] "Generic (PLEG): container finished" podID="866115b3-77a9-48bc-b597-279183fb2f5a" containerID="085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715" exitCode=0 Feb 02 09:39:31 crc kubenswrapper[4747]: I0202 09:39:31.327991 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerDied","Data":"085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715"} Feb 02 09:39:31 crc kubenswrapper[4747]: I0202 09:39:31.328251 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerStarted","Data":"58c267a84bddfce62c517a8fe2d7cd3007372889719b9ae3ef2ad0f690524a8a"} Feb 02 09:39:33 crc kubenswrapper[4747]: I0202 09:39:33.347653 4747 generic.go:334] "Generic (PLEG): container finished" podID="866115b3-77a9-48bc-b597-279183fb2f5a" containerID="351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812" exitCode=0 Feb 02 09:39:33 crc kubenswrapper[4747]: I0202 09:39:33.347736 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerDied","Data":"351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812"} Feb 02 09:39:34 crc kubenswrapper[4747]: I0202 09:39:34.358246 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerStarted","Data":"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad"} Feb 02 09:39:34 crc kubenswrapper[4747]: I0202 09:39:34.377421 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gj82l" podStartSLOduration=2.927856096 podStartE2EDuration="5.377398147s" podCreationTimestamp="2026-02-02 09:39:29 +0000 UTC" firstStartedPulling="2026-02-02 09:39:31.330371569 +0000 UTC m=+2583.874710002" lastFinishedPulling="2026-02-02 09:39:33.77991362 +0000 UTC m=+2586.324252053" observedRunningTime="2026-02-02 09:39:34.372882224 +0000 UTC m=+2586.917220677" watchObservedRunningTime="2026-02-02 09:39:34.377398147 +0000 UTC m=+2586.921736580" Feb 02 09:39:40 crc kubenswrapper[4747]: I0202 09:39:40.301809 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:40 crc kubenswrapper[4747]: I0202 09:39:40.302430 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:40 crc kubenswrapper[4747]: I0202 09:39:40.356783 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:40 crc kubenswrapper[4747]: I0202 09:39:40.467179 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:40 crc kubenswrapper[4747]: I0202 09:39:40.591198 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:42 crc kubenswrapper[4747]: I0202 09:39:42.440476 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gj82l" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="registry-server" containerID="cri-o://13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad" gracePeriod=2 Feb 02 09:39:42 crc kubenswrapper[4747]: I0202 09:39:42.948771 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.126276 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmgrg\" (UniqueName: \"kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg\") pod \"866115b3-77a9-48bc-b597-279183fb2f5a\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.126606 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities\") pod \"866115b3-77a9-48bc-b597-279183fb2f5a\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.126644 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content\") pod \"866115b3-77a9-48bc-b597-279183fb2f5a\" (UID: \"866115b3-77a9-48bc-b597-279183fb2f5a\") " Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.127314 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities" (OuterVolumeSpecName: "utilities") pod "866115b3-77a9-48bc-b597-279183fb2f5a" (UID: "866115b3-77a9-48bc-b597-279183fb2f5a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.132338 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg" (OuterVolumeSpecName: "kube-api-access-xmgrg") pod "866115b3-77a9-48bc-b597-279183fb2f5a" (UID: "866115b3-77a9-48bc-b597-279183fb2f5a"). InnerVolumeSpecName "kube-api-access-xmgrg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.229352 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmgrg\" (UniqueName: \"kubernetes.io/projected/866115b3-77a9-48bc-b597-279183fb2f5a-kube-api-access-xmgrg\") on node \"crc\" DevicePath \"\"" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.229388 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.339503 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "866115b3-77a9-48bc-b597-279183fb2f5a" (UID: "866115b3-77a9-48bc-b597-279183fb2f5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.434455 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/866115b3-77a9-48bc-b597-279183fb2f5a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.451375 4747 generic.go:334] "Generic (PLEG): container finished" podID="866115b3-77a9-48bc-b597-279183fb2f5a" containerID="13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad" exitCode=0 Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.451413 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerDied","Data":"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad"} Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.451437 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gj82l" event={"ID":"866115b3-77a9-48bc-b597-279183fb2f5a","Type":"ContainerDied","Data":"58c267a84bddfce62c517a8fe2d7cd3007372889719b9ae3ef2ad0f690524a8a"} Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.451488 4747 scope.go:117] "RemoveContainer" containerID="13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.452389 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gj82l" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.471826 4747 scope.go:117] "RemoveContainer" containerID="351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.492317 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.503110 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gj82l"] Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.510448 4747 scope.go:117] "RemoveContainer" containerID="085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.552227 4747 scope.go:117] "RemoveContainer" containerID="13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad" Feb 02 09:39:43 crc kubenswrapper[4747]: E0202 09:39:43.552650 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad\": container with ID starting with 13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad not found: ID does not exist" containerID="13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.552689 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad"} err="failed to get container status \"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad\": rpc error: code = NotFound desc = could not find container \"13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad\": container with ID starting with 13832656d2ef481bb5a4eacec493b7feaf0e0af66d9de9d43c6cacede4d626ad not found: ID does not exist" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.552715 4747 scope.go:117] "RemoveContainer" containerID="351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812" Feb 02 09:39:43 crc kubenswrapper[4747]: E0202 09:39:43.553100 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812\": container with ID starting with 351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812 not found: ID does not exist" containerID="351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.553225 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812"} err="failed to get container status \"351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812\": rpc error: code = NotFound desc = could not find container \"351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812\": container with ID starting with 351e9775d2c922421ab5a25349d1892594e1af2d8876106f0ebb36697a485812 not found: ID does not exist" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.553338 4747 scope.go:117] "RemoveContainer" containerID="085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715" Feb 02 09:39:43 crc kubenswrapper[4747]: E0202 09:39:43.553751 4747 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715\": container with ID starting with 085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715 not found: ID does not exist" containerID="085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715" Feb 02 09:39:43 crc kubenswrapper[4747]: I0202 09:39:43.553802 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715"} err="failed to get container status \"085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715\": rpc error: code = NotFound desc = could not find container \"085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715\": container with ID starting with 085e7c2145b5c0ca06b05ad5f0746851b7956a77dd2f052600963b3809dcb715 not found: ID does not exist" Feb 02 09:39:44 crc kubenswrapper[4747]: I0202 09:39:44.349852 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" path="/var/lib/kubelet/pods/866115b3-77a9-48bc-b597-279183fb2f5a/volumes" Feb 02 09:41:20 crc kubenswrapper[4747]: I0202 09:41:20.518653 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:41:20 crc kubenswrapper[4747]: I0202 09:41:20.520538 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:41:50 crc kubenswrapper[4747]: I0202 09:41:50.519041 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:41:50 crc kubenswrapper[4747]: I0202 09:41:50.519799 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.518643 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.519197 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.519244 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.519928 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.519992 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90" gracePeriod=600 Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.842073 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90" exitCode=0 Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.842599 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90"} Feb 02 09:42:20 crc kubenswrapper[4747]: I0202 09:42:20.842648 4747 scope.go:117] "RemoveContainer" containerID="0dd9c028dcd7c8e8b927cf9c7c182cc7f549e300074cbd4ebfb5287e0a99ef36" Feb 02 09:42:21 crc kubenswrapper[4747]: I0202 09:42:21.853089 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe"} Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.000680 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:44 crc kubenswrapper[4747]: E0202 09:42:44.001601 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="extract-content" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.001614 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="extract-content" Feb 02 09:42:44 crc kubenswrapper[4747]: E0202 09:42:44.001638 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="extract-utilities" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.001645 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="extract-utilities" Feb 02 09:42:44 crc kubenswrapper[4747]: E0202 09:42:44.001669 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="registry-server" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.001675 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="registry-server" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.001851 4747 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="866115b3-77a9-48bc-b597-279183fb2f5a" containerName="registry-server" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.003268 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.014798 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.099111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.099247 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxc7h\" (UniqueName: \"kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.099332 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.200760 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxc7h\" (UniqueName: \"kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.200889 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.200998 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.201582 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.201629 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content\") pod \"redhat-marketplace-lmxqm\" (UID: 
\"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.220968 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxc7h\" (UniqueName: \"kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h\") pod \"redhat-marketplace-lmxqm\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.341780 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:44 crc kubenswrapper[4747]: I0202 09:42:44.795719 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:45 crc kubenswrapper[4747]: I0202 09:42:45.042524 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce438670-57d0-4db6-b86a-b70921f33d52" containerID="a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064" exitCode=0 Feb 02 09:42:45 crc kubenswrapper[4747]: I0202 09:42:45.042612 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerDied","Data":"a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064"} Feb 02 09:42:45 crc kubenswrapper[4747]: I0202 09:42:45.044662 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:42:45 crc kubenswrapper[4747]: I0202 09:42:45.045147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerStarted","Data":"9c1ab45738822e6787f475a67ca426f04797104e2b5af05a4f2b86a01c2bfef5"} Feb 02 09:42:46 crc kubenswrapper[4747]: I0202 09:42:46.056763 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerStarted","Data":"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3"} Feb 02 09:42:47 crc kubenswrapper[4747]: I0202 09:42:47.066887 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce438670-57d0-4db6-b86a-b70921f33d52" containerID="d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3" exitCode=0 Feb 02 09:42:47 crc kubenswrapper[4747]: I0202 09:42:47.066960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerDied","Data":"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3"} Feb 02 09:42:49 crc kubenswrapper[4747]: I0202 09:42:49.095809 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerStarted","Data":"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439"} Feb 02 09:42:49 crc kubenswrapper[4747]: I0202 09:42:49.117511 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lmxqm" podStartSLOduration=2.667391976 podStartE2EDuration="6.117492685s" podCreationTimestamp="2026-02-02 09:42:43 +0000 UTC" firstStartedPulling="2026-02-02 09:42:45.044457912 +0000 UTC m=+2777.588796345" 
lastFinishedPulling="2026-02-02 09:42:48.494558621 +0000 UTC m=+2781.038897054" observedRunningTime="2026-02-02 09:42:49.116380077 +0000 UTC m=+2781.660718520" watchObservedRunningTime="2026-02-02 09:42:49.117492685 +0000 UTC m=+2781.661831128" Feb 02 09:42:54 crc kubenswrapper[4747]: I0202 09:42:54.356679 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:54 crc kubenswrapper[4747]: I0202 09:42:54.357330 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:54 crc kubenswrapper[4747]: I0202 09:42:54.396721 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:55 crc kubenswrapper[4747]: I0202 09:42:55.208616 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:55 crc kubenswrapper[4747]: I0202 09:42:55.261121 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.157357 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lmxqm" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="registry-server" containerID="cri-o://69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439" gracePeriod=2 Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.735462 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.897199 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content\") pod \"ce438670-57d0-4db6-b86a-b70921f33d52\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.897337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities\") pod \"ce438670-57d0-4db6-b86a-b70921f33d52\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.897517 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxc7h\" (UniqueName: \"kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h\") pod \"ce438670-57d0-4db6-b86a-b70921f33d52\" (UID: \"ce438670-57d0-4db6-b86a-b70921f33d52\") " Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.898412 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities" (OuterVolumeSpecName: "utilities") pod "ce438670-57d0-4db6-b86a-b70921f33d52" (UID: "ce438670-57d0-4db6-b86a-b70921f33d52"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.905925 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h" (OuterVolumeSpecName: "kube-api-access-fxc7h") pod "ce438670-57d0-4db6-b86a-b70921f33d52" (UID: "ce438670-57d0-4db6-b86a-b70921f33d52"). InnerVolumeSpecName "kube-api-access-fxc7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.924723 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce438670-57d0-4db6-b86a-b70921f33d52" (UID: "ce438670-57d0-4db6-b86a-b70921f33d52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.999840 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxc7h\" (UniqueName: \"kubernetes.io/projected/ce438670-57d0-4db6-b86a-b70921f33d52-kube-api-access-fxc7h\") on node \"crc\" DevicePath \"\"" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.999878 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:42:57 crc kubenswrapper[4747]: I0202 09:42:57.999887 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce438670-57d0-4db6-b86a-b70921f33d52-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.167930 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce438670-57d0-4db6-b86a-b70921f33d52" containerID="69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439" exitCode=0 Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.167961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerDied","Data":"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439"} Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.168013 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lmxqm" event={"ID":"ce438670-57d0-4db6-b86a-b70921f33d52","Type":"ContainerDied","Data":"9c1ab45738822e6787f475a67ca426f04797104e2b5af05a4f2b86a01c2bfef5"} Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.168033 4747 scope.go:117] "RemoveContainer" containerID="69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.168036 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lmxqm" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.196230 4747 scope.go:117] "RemoveContainer" containerID="d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.203779 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.211896 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lmxqm"] Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.228575 4747 scope.go:117] "RemoveContainer" containerID="a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.270004 4747 scope.go:117] "RemoveContainer" containerID="69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439" Feb 02 09:42:58 crc kubenswrapper[4747]: E0202 09:42:58.270477 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439\": container with ID starting with 69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439 not found: ID does not exist" containerID="69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.270511 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439"} err="failed to get container status \"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439\": rpc error: code = NotFound desc = could not find container \"69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439\": container with ID starting with 69f49f2c09cc8cb539e99fc1c1d4ef29ae6d2c989517e584bcb1340f69c51439 not found: ID does not exist" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.270535 4747 scope.go:117] "RemoveContainer" containerID="d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3" Feb 02 09:42:58 crc kubenswrapper[4747]: E0202 09:42:58.270963 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3\": container with ID starting with d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3 not found: ID does not exist" containerID="d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.271011 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3"} err="failed to get container status \"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3\": rpc error: code = NotFound desc = could not find container \"d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3\": container with ID starting with d3f121aea7bcb8fb0d174fc50b71dc8fb4d58511347f98591d4f94cedcf3d7b3 not found: ID does not exist" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.271037 4747 scope.go:117] "RemoveContainer" containerID="a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064" Feb 02 09:42:58 crc kubenswrapper[4747]: E0202 09:42:58.271425 4747 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064\": container with ID starting with a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064 not found: ID does not exist" containerID="a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.271489 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064"} err="failed to get container status \"a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064\": rpc error: code = NotFound desc = could not find container \"a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064\": container with ID starting with a93d7295e9c1f4370ed95a44ddf351ad7658419b422837d03bd881e5210a0064 not found: ID does not exist" Feb 02 09:42:58 crc kubenswrapper[4747]: I0202 09:42:58.352866 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" path="/var/lib/kubelet/pods/ce438670-57d0-4db6-b86a-b70921f33d52/volumes" Feb 02 09:44:20 crc kubenswrapper[4747]: I0202 09:44:20.518191 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:44:20 crc kubenswrapper[4747]: I0202 09:44:20.519922 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.771269 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:21 crc kubenswrapper[4747]: E0202 09:44:21.771886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="extract-content" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.771898 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="extract-content" Feb 02 09:44:21 crc kubenswrapper[4747]: E0202 09:44:21.771929 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="extract-utilities" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.771950 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="extract-utilities" Feb 02 09:44:21 crc kubenswrapper[4747]: E0202 09:44:21.771961 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="registry-server" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.771967 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="registry-server" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.772139 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce438670-57d0-4db6-b86a-b70921f33d52" containerName="registry-server" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 
09:44:21.773632 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.790398 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.928293 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4n9t\" (UniqueName: \"kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.928460 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:21 crc kubenswrapper[4747]: I0202 09:44:21.928513 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.030444 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4n9t\" (UniqueName: \"kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.030541 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.030598 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.031178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.031233 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.054763 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4n9t\" (UniqueName: \"kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t\") pod \"redhat-operators-tsfjc\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.094168 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.551553 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.899687 4747 generic.go:334] "Generic (PLEG): container finished" podID="d745bd9e-a742-4587-bcef-d24a9180a367" containerID="de92aea7f3aefc0ff188fce2acd68b356c95831bd77041c6c23728bda0e44b44" exitCode=0 Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.899739 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerDied","Data":"de92aea7f3aefc0ff188fce2acd68b356c95831bd77041c6c23728bda0e44b44"} Feb 02 09:44:22 crc kubenswrapper[4747]: I0202 09:44:22.899767 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerStarted","Data":"b1ad2733d8de47f8e187dabde8e6d3b259f7f0790f25fd835f3587286988a30d"} Feb 02 09:44:23 crc kubenswrapper[4747]: I0202 09:44:23.908946 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerStarted","Data":"ef329933d14bc1102846312423922ee05eb16f3840151e4128e6f21c080061c9"} Feb 02 09:44:24 crc kubenswrapper[4747]: I0202 09:44:24.920515 4747 generic.go:334] "Generic (PLEG): container finished" podID="d745bd9e-a742-4587-bcef-d24a9180a367" containerID="ef329933d14bc1102846312423922ee05eb16f3840151e4128e6f21c080061c9" exitCode=0 Feb 02 09:44:24 crc kubenswrapper[4747]: I0202 09:44:24.920580 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerDied","Data":"ef329933d14bc1102846312423922ee05eb16f3840151e4128e6f21c080061c9"} Feb 02 09:44:25 crc kubenswrapper[4747]: I0202 09:44:25.933185 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerStarted","Data":"a8596700c30101ba957d32c8bc3eee1f438cfdbd09c364b1d9fcf96c9c6d4aab"} Feb 02 09:44:25 crc kubenswrapper[4747]: I0202 09:44:25.961643 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tsfjc" podStartSLOduration=2.430954708 podStartE2EDuration="4.961617073s" podCreationTimestamp="2026-02-02 09:44:21 +0000 UTC" firstStartedPulling="2026-02-02 09:44:22.902135804 +0000 UTC m=+2875.446474237" lastFinishedPulling="2026-02-02 09:44:25.432798169 +0000 UTC m=+2877.977136602" observedRunningTime="2026-02-02 09:44:25.954404163 +0000 UTC m=+2878.498742596" watchObservedRunningTime="2026-02-02 09:44:25.961617073 +0000 UTC m=+2878.505955506" Feb 02 09:44:32 crc kubenswrapper[4747]: I0202 09:44:32.094860 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:32 crc kubenswrapper[4747]: I0202 09:44:32.095584 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:32 crc kubenswrapper[4747]: I0202 09:44:32.146146 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:33 crc kubenswrapper[4747]: I0202 09:44:33.054416 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:33 crc kubenswrapper[4747]: I0202 09:44:33.102788 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:35 crc kubenswrapper[4747]: I0202 09:44:35.034092 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tsfjc" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="registry-server" containerID="cri-o://a8596700c30101ba957d32c8bc3eee1f438cfdbd09c364b1d9fcf96c9c6d4aab" gracePeriod=2 Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.059716 4747 generic.go:334] "Generic (PLEG): container finished" podID="d745bd9e-a742-4587-bcef-d24a9180a367" containerID="a8596700c30101ba957d32c8bc3eee1f438cfdbd09c364b1d9fcf96c9c6d4aab" exitCode=0 Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.060311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerDied","Data":"a8596700c30101ba957d32c8bc3eee1f438cfdbd09c364b1d9fcf96c9c6d4aab"} Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.371150 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.523560 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content\") pod \"d745bd9e-a742-4587-bcef-d24a9180a367\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.523618 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities\") pod \"d745bd9e-a742-4587-bcef-d24a9180a367\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.523662 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4n9t\" (UniqueName: \"kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t\") pod \"d745bd9e-a742-4587-bcef-d24a9180a367\" (UID: \"d745bd9e-a742-4587-bcef-d24a9180a367\") " Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.524653 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities" (OuterVolumeSpecName: "utilities") pod "d745bd9e-a742-4587-bcef-d24a9180a367" (UID: "d745bd9e-a742-4587-bcef-d24a9180a367"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.529692 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t" (OuterVolumeSpecName: "kube-api-access-m4n9t") pod "d745bd9e-a742-4587-bcef-d24a9180a367" (UID: "d745bd9e-a742-4587-bcef-d24a9180a367"). InnerVolumeSpecName "kube-api-access-m4n9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.627246 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.627301 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4n9t\" (UniqueName: \"kubernetes.io/projected/d745bd9e-a742-4587-bcef-d24a9180a367-kube-api-access-m4n9t\") on node \"crc\" DevicePath \"\"" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.634398 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d745bd9e-a742-4587-bcef-d24a9180a367" (UID: "d745bd9e-a742-4587-bcef-d24a9180a367"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:44:37 crc kubenswrapper[4747]: I0202 09:44:37.728967 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d745bd9e-a742-4587-bcef-d24a9180a367-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.073294 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsfjc" event={"ID":"d745bd9e-a742-4587-bcef-d24a9180a367","Type":"ContainerDied","Data":"b1ad2733d8de47f8e187dabde8e6d3b259f7f0790f25fd835f3587286988a30d"} Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.073370 4747 scope.go:117] "RemoveContainer" containerID="a8596700c30101ba957d32c8bc3eee1f438cfdbd09c364b1d9fcf96c9c6d4aab" Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.073375 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tsfjc" Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.095149 4747 scope.go:117] "RemoveContainer" containerID="ef329933d14bc1102846312423922ee05eb16f3840151e4128e6f21c080061c9" Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.120569 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.131005 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tsfjc"] Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.143515 4747 scope.go:117] "RemoveContainer" containerID="de92aea7f3aefc0ff188fce2acd68b356c95831bd77041c6c23728bda0e44b44" Feb 02 09:44:38 crc kubenswrapper[4747]: I0202 09:44:38.352184 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" path="/var/lib/kubelet/pods/d745bd9e-a742-4587-bcef-d24a9180a367/volumes" Feb 02 09:44:50 crc kubenswrapper[4747]: I0202 09:44:50.518414 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:44:50 crc kubenswrapper[4747]: I0202 09:44:50.518895 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.159955 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9"] Feb 02 09:45:00 crc kubenswrapper[4747]: E0202 09:45:00.160851 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="extract-content" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.160867 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="extract-content" Feb 02 09:45:00 crc kubenswrapper[4747]: E0202 09:45:00.160896 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="extract-utilities" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.160903 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="extract-utilities" Feb 02 09:45:00 crc kubenswrapper[4747]: E0202 09:45:00.160928 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="registry-server" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.160948 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="registry-server" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.161121 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d745bd9e-a742-4587-bcef-d24a9180a367" containerName="registry-server" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.161802 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.164289 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.164813 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.168823 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9"] Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.274041 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgr6w\" (UniqueName: \"kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.274164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.274293 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.376230 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.376345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.376460 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgr6w\" (UniqueName: \"kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.378496 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume\") pod 
\"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.383217 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.396439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgr6w\" (UniqueName: \"kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w\") pod \"collect-profiles-29500425-r8df9\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.485277 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:00 crc kubenswrapper[4747]: I0202 09:45:00.971299 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9"] Feb 02 09:45:00 crc kubenswrapper[4747]: W0202 09:45:00.980198 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99d069b3_f36d_4a62_b315_c14b562014c5.slice/crio-829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0 WatchSource:0}: Error finding container 829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0: Status 404 returned error can't find the container with id 829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0 Feb 02 09:45:01 crc kubenswrapper[4747]: I0202 09:45:01.269357 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" event={"ID":"99d069b3-f36d-4a62-b315-c14b562014c5","Type":"ContainerStarted","Data":"f9967e356c55a071341c6ffe357cfd3fe52e8eaec1d71659167f37c5eacee6e9"} Feb 02 09:45:01 crc kubenswrapper[4747]: I0202 09:45:01.269403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" event={"ID":"99d069b3-f36d-4a62-b315-c14b562014c5","Type":"ContainerStarted","Data":"829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0"} Feb 02 09:45:01 crc kubenswrapper[4747]: I0202 09:45:01.296611 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" podStartSLOduration=1.296583081 podStartE2EDuration="1.296583081s" podCreationTimestamp="2026-02-02 09:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 09:45:01.2897287 +0000 UTC m=+2913.834067133" watchObservedRunningTime="2026-02-02 09:45:01.296583081 +0000 UTC m=+2913.840921514" Feb 02 09:45:02 crc kubenswrapper[4747]: I0202 09:45:02.279382 4747 generic.go:334] "Generic (PLEG): container finished" podID="99d069b3-f36d-4a62-b315-c14b562014c5" containerID="f9967e356c55a071341c6ffe357cfd3fe52e8eaec1d71659167f37c5eacee6e9" exitCode=0 Feb 02 09:45:02 crc kubenswrapper[4747]: I0202 09:45:02.279435 
4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" event={"ID":"99d069b3-f36d-4a62-b315-c14b562014c5","Type":"ContainerDied","Data":"f9967e356c55a071341c6ffe357cfd3fe52e8eaec1d71659167f37c5eacee6e9"} Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.808483 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.873151 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume\") pod \"99d069b3-f36d-4a62-b315-c14b562014c5\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.873353 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume\") pod \"99d069b3-f36d-4a62-b315-c14b562014c5\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.873431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgr6w\" (UniqueName: \"kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w\") pod \"99d069b3-f36d-4a62-b315-c14b562014c5\" (UID: \"99d069b3-f36d-4a62-b315-c14b562014c5\") " Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.874351 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume" (OuterVolumeSpecName: "config-volume") pod "99d069b3-f36d-4a62-b315-c14b562014c5" (UID: "99d069b3-f36d-4a62-b315-c14b562014c5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.878324 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "99d069b3-f36d-4a62-b315-c14b562014c5" (UID: "99d069b3-f36d-4a62-b315-c14b562014c5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.878737 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w" (OuterVolumeSpecName: "kube-api-access-hgr6w") pod "99d069b3-f36d-4a62-b315-c14b562014c5" (UID: "99d069b3-f36d-4a62-b315-c14b562014c5"). InnerVolumeSpecName "kube-api-access-hgr6w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.975842 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgr6w\" (UniqueName: \"kubernetes.io/projected/99d069b3-f36d-4a62-b315-c14b562014c5-kube-api-access-hgr6w\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.975876 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99d069b3-f36d-4a62-b315-c14b562014c5-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:03 crc kubenswrapper[4747]: I0202 09:45:03.975886 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99d069b3-f36d-4a62-b315-c14b562014c5-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:04 crc kubenswrapper[4747]: I0202 09:45:04.296817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" event={"ID":"99d069b3-f36d-4a62-b315-c14b562014c5","Type":"ContainerDied","Data":"829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0"} Feb 02 09:45:04 crc kubenswrapper[4747]: I0202 09:45:04.296858 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="829bccaf7a492843cb457441d0ac9630c6a2155222603f1eedcc60d2bd2e97b0" Feb 02 09:45:04 crc kubenswrapper[4747]: I0202 09:45:04.296948 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500425-r8df9" Feb 02 09:45:04 crc kubenswrapper[4747]: I0202 09:45:04.363394 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd"] Feb 02 09:45:04 crc kubenswrapper[4747]: I0202 09:45:04.371103 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500380-2wfhd"] Feb 02 09:45:06 crc kubenswrapper[4747]: I0202 09:45:06.356014 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="939ae80d-cdd6-4306-b7fd-7b222f530288" path="/var/lib/kubelet/pods/939ae80d-cdd6-4306-b7fd-7b222f530288/volumes" Feb 02 09:45:08 crc kubenswrapper[4747]: I0202 09:45:08.117629 4747 scope.go:117] "RemoveContainer" containerID="367d53fc44cfbd809577fe19febfdd2715d049919efa8816d6dc8abed2d81b28" Feb 02 09:45:20 crc kubenswrapper[4747]: I0202 09:45:20.519260 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:45:20 crc kubenswrapper[4747]: I0202 09:45:20.519890 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:45:20 crc kubenswrapper[4747]: I0202 09:45:20.519954 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:45:20 crc kubenswrapper[4747]: I0202 09:45:20.520767 4747 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:45:20 crc kubenswrapper[4747]: I0202 09:45:20.520825 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" gracePeriod=600 Feb 02 09:45:20 crc kubenswrapper[4747]: E0202 09:45:20.661784 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:45:21 crc kubenswrapper[4747]: I0202 09:45:21.440877 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" exitCode=0 Feb 02 09:45:21 crc kubenswrapper[4747]: I0202 09:45:21.440959 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe"} Feb 02 09:45:21 crc kubenswrapper[4747]: I0202 09:45:21.441262 4747 scope.go:117] "RemoveContainer" containerID="3b6039505cb0d5d67440d9e7a1c60496b0f473441ea6e956fb39698093504b90" Feb 02 09:45:21 crc kubenswrapper[4747]: I0202 09:45:21.441790 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:45:21 crc kubenswrapper[4747]: E0202 09:45:21.442024 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:45:36 crc kubenswrapper[4747]: I0202 09:45:36.340105 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:45:36 crc kubenswrapper[4747]: E0202 09:45:36.341023 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:45:49 crc kubenswrapper[4747]: I0202 09:45:49.714258 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"84239355-717c-437f-abf5-b5df1b3a0806","Type":"ContainerDied","Data":"db3d3dc2ea52f421407ec8a3ac7e8d5db634e56eddbb8fb17d8b0f441d0cef3c"} Feb 02 09:45:49 crc kubenswrapper[4747]: I0202 09:45:49.714255 4747 generic.go:334] "Generic (PLEG): container finished" podID="84239355-717c-437f-abf5-b5df1b3a0806" containerID="db3d3dc2ea52f421407ec8a3ac7e8d5db634e56eddbb8fb17d8b0f441d0cef3c" exitCode=0 Feb 02 09:45:50 crc kubenswrapper[4747]: I0202 09:45:50.339884 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:45:50 crc kubenswrapper[4747]: E0202 09:45:50.340344 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.098975 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.205225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.205298 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.205333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.205402 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.205439 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206059 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206150 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206200 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qmcm\" (UniqueName: \"kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206277 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key\") pod \"84239355-717c-437f-abf5-b5df1b3a0806\" (UID: \"84239355-717c-437f-abf5-b5df1b3a0806\") " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206270 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.206449 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data" (OuterVolumeSpecName: "config-data") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.207100 4747 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.207282 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.210971 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.213158 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.213273 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm" (OuterVolumeSpecName: "kube-api-access-4qmcm") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "kube-api-access-4qmcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.236799 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.239188 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.239641 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.263686 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "84239355-717c-437f-abf5-b5df1b3a0806" (UID: "84239355-717c-437f-abf5-b5df1b3a0806"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308690 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308757 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308768 4747 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ca-certs\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308777 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308789 4747 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/84239355-717c-437f-abf5-b5df1b3a0806-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308799 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qmcm\" (UniqueName: \"kubernetes.io/projected/84239355-717c-437f-abf5-b5df1b3a0806-kube-api-access-4qmcm\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.308808 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/84239355-717c-437f-abf5-b5df1b3a0806-ssh-key\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.328529 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.411383 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.734105 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"84239355-717c-437f-abf5-b5df1b3a0806","Type":"ContainerDied","Data":"a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb"} Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.734146 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a260236b49a099bb0bae66d8f42f9798f7d1521cda076a5e166641974f5a0bcb" Feb 02 09:45:51 crc kubenswrapper[4747]: I0202 09:45:51.734144 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.246412 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 09:45:57 crc kubenswrapper[4747]: E0202 09:45:57.247550 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d069b3-f36d-4a62-b315-c14b562014c5" containerName="collect-profiles" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.247568 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d069b3-f36d-4a62-b315-c14b562014c5" containerName="collect-profiles" Feb 02 09:45:57 crc kubenswrapper[4747]: E0202 09:45:57.247589 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84239355-717c-437f-abf5-b5df1b3a0806" containerName="tempest-tests-tempest-tests-runner" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.247598 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="84239355-717c-437f-abf5-b5df1b3a0806" containerName="tempest-tests-tempest-tests-runner" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.247845 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d069b3-f36d-4a62-b315-c14b562014c5" containerName="collect-profiles" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.247884 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="84239355-717c-437f-abf5-b5df1b3a0806" containerName="tempest-tests-tempest-tests-runner" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.248891 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.251250 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mp7z4" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.259072 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.325427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.325611 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpnwc\" (UniqueName: \"kubernetes.io/projected/5c1cf2d0-f9fb-494c-908f-5ccabec6364a-kube-api-access-gpnwc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.427503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpnwc\" (UniqueName: \"kubernetes.io/projected/5c1cf2d0-f9fb-494c-908f-5ccabec6364a-kube-api-access-gpnwc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.427894 4747 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.428541 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.463662 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpnwc\" (UniqueName: \"kubernetes.io/projected/5c1cf2d0-f9fb-494c-908f-5ccabec6364a-kube-api-access-gpnwc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.475066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"5c1cf2d0-f9fb-494c-908f-5ccabec6364a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:57 crc kubenswrapper[4747]: I0202 09:45:57.574770 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 02 09:45:58 crc kubenswrapper[4747]: I0202 09:45:58.020302 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 02 09:45:58 crc kubenswrapper[4747]: W0202 09:45:58.023378 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c1cf2d0_f9fb_494c_908f_5ccabec6364a.slice/crio-6552a22c4cfc06b7e8b1dfb06b9e45c7d8a6804f857438880bf363dfbe8c4d7c WatchSource:0}: Error finding container 6552a22c4cfc06b7e8b1dfb06b9e45c7d8a6804f857438880bf363dfbe8c4d7c: Status 404 returned error can't find the container with id 6552a22c4cfc06b7e8b1dfb06b9e45c7d8a6804f857438880bf363dfbe8c4d7c Feb 02 09:45:58 crc kubenswrapper[4747]: I0202 09:45:58.795208 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"5c1cf2d0-f9fb-494c-908f-5ccabec6364a","Type":"ContainerStarted","Data":"6552a22c4cfc06b7e8b1dfb06b9e45c7d8a6804f857438880bf363dfbe8c4d7c"} Feb 02 09:45:59 crc kubenswrapper[4747]: I0202 09:45:59.804590 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"5c1cf2d0-f9fb-494c-908f-5ccabec6364a","Type":"ContainerStarted","Data":"5bbc6a140d61d245bd7b562bb984b566f2ae014b18e6e08ec9cfe097e4367014"} Feb 02 09:45:59 crc kubenswrapper[4747]: I0202 09:45:59.823087 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.9972938949999999 podStartE2EDuration="2.822841479s" podCreationTimestamp="2026-02-02 09:45:57 +0000 UTC" 
firstStartedPulling="2026-02-02 09:45:58.025580366 +0000 UTC m=+2970.569918799" lastFinishedPulling="2026-02-02 09:45:58.85112794 +0000 UTC m=+2971.395466383" observedRunningTime="2026-02-02 09:45:59.819064285 +0000 UTC m=+2972.363402758" watchObservedRunningTime="2026-02-02 09:45:59.822841479 +0000 UTC m=+2972.367179932" Feb 02 09:46:01 crc kubenswrapper[4747]: I0202 09:46:01.339664 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:46:01 crc kubenswrapper[4747]: E0202 09:46:01.340149 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:46:12 crc kubenswrapper[4747]: I0202 09:46:12.339350 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:46:12 crc kubenswrapper[4747]: E0202 09:46:12.340068 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:46:25 crc kubenswrapper[4747]: I0202 09:46:25.944963 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xhpmv/must-gather-srbbm"] Feb 02 09:46:25 crc kubenswrapper[4747]: I0202 09:46:25.948403 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:25 crc kubenswrapper[4747]: I0202 09:46:25.950755 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xhpmv"/"openshift-service-ca.crt" Feb 02 09:46:25 crc kubenswrapper[4747]: I0202 09:46:25.952078 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xhpmv"/"kube-root-ca.crt" Feb 02 09:46:25 crc kubenswrapper[4747]: I0202 09:46:25.968808 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xhpmv/must-gather-srbbm"] Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.077153 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.077529 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnmpk\" (UniqueName: \"kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.179096 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnmpk\" (UniqueName: \"kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.179270 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.179759 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.199868 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnmpk\" (UniqueName: \"kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk\") pod \"must-gather-srbbm\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.270106 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:46:26 crc kubenswrapper[4747]: I0202 09:46:26.901315 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xhpmv/must-gather-srbbm"] Feb 02 09:46:27 crc kubenswrapper[4747]: I0202 09:46:27.053280 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/must-gather-srbbm" event={"ID":"894bd1e5-3142-4804-8cb1-1afc854a92f1","Type":"ContainerStarted","Data":"421f64ed7e42d0a5809acc6ae6476b44e83300df46a638c418391b81ca2b99d8"} Feb 02 09:46:27 crc kubenswrapper[4747]: I0202 09:46:27.339481 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:46:27 crc kubenswrapper[4747]: E0202 09:46:27.339745 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:46:32 crc kubenswrapper[4747]: I0202 09:46:32.112266 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/must-gather-srbbm" event={"ID":"894bd1e5-3142-4804-8cb1-1afc854a92f1","Type":"ContainerStarted","Data":"9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c"} Feb 02 09:46:32 crc kubenswrapper[4747]: I0202 09:46:32.112669 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/must-gather-srbbm" event={"ID":"894bd1e5-3142-4804-8cb1-1afc854a92f1","Type":"ContainerStarted","Data":"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b"} Feb 02 09:46:32 crc kubenswrapper[4747]: I0202 09:46:32.171244 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xhpmv/must-gather-srbbm" podStartSLOduration=3.166668314 podStartE2EDuration="7.17121787s" podCreationTimestamp="2026-02-02 09:46:25 +0000 UTC" firstStartedPulling="2026-02-02 09:46:26.899062666 +0000 UTC m=+2999.443401099" lastFinishedPulling="2026-02-02 09:46:30.903612222 +0000 UTC m=+3003.447950655" observedRunningTime="2026-02-02 09:46:32.160658618 +0000 UTC m=+3004.704997051" watchObservedRunningTime="2026-02-02 09:46:32.17121787 +0000 UTC m=+3004.715556323" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.514211 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-qv6dx"] Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.516175 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.518413 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xhpmv"/"default-dockercfg-d6z2n" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.645473 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.645641 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs64d\" (UniqueName: \"kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.747497 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs64d\" (UniqueName: \"kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.747609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.747732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.764515 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs64d\" (UniqueName: \"kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d\") pod \"crc-debug-qv6dx\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:34 crc kubenswrapper[4747]: I0202 09:46:34.837522 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:46:35 crc kubenswrapper[4747]: I0202 09:46:35.139156 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" event={"ID":"cf092a65-7c03-4cb6-a248-823c7361cb1a","Type":"ContainerStarted","Data":"d236d36977ffe64ea6969906c0fa521db9047b0d5a19a84c504001250e0e0540"} Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.496680 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.500117 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.507991 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.649266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.649696 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz8wm\" (UniqueName: \"kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.649797 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.752378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.752796 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz8wm\" (UniqueName: \"kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.752896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.753562 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.753966 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.789895 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qz8wm\" (UniqueName: \"kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm\") pod \"community-operators-nl8qf\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:39 crc kubenswrapper[4747]: I0202 09:46:39.828616 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:41 crc kubenswrapper[4747]: I0202 09:46:41.340087 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:46:41 crc kubenswrapper[4747]: E0202 09:46:41.340594 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:46:46 crc kubenswrapper[4747]: I0202 09:46:46.234155 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" event={"ID":"cf092a65-7c03-4cb6-a248-823c7361cb1a","Type":"ContainerStarted","Data":"3464b314d23686421a0f33cc8edf0d29fafe35b64af23bef4ac2e4acf05a5f15"} Feb 02 09:46:46 crc kubenswrapper[4747]: I0202 09:46:46.262311 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" podStartSLOduration=1.257185199 podStartE2EDuration="12.262291351s" podCreationTimestamp="2026-02-02 09:46:34 +0000 UTC" firstStartedPulling="2026-02-02 09:46:34.893120403 +0000 UTC m=+3007.437458836" lastFinishedPulling="2026-02-02 09:46:45.898226555 +0000 UTC m=+3018.442564988" observedRunningTime="2026-02-02 09:46:46.249901173 +0000 UTC m=+3018.794239606" watchObservedRunningTime="2026-02-02 09:46:46.262291351 +0000 UTC m=+3018.806629784" Feb 02 09:46:46 crc kubenswrapper[4747]: I0202 09:46:46.366772 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:46:47 crc kubenswrapper[4747]: I0202 09:46:47.245319 4747 generic.go:334] "Generic (PLEG): container finished" podID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerID="851e11d0d4e72df59d0edccde6c5b1ce3ff3f9ae693c153587c5a83da207712b" exitCode=0 Feb 02 09:46:47 crc kubenswrapper[4747]: I0202 09:46:47.245463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerDied","Data":"851e11d0d4e72df59d0edccde6c5b1ce3ff3f9ae693c153587c5a83da207712b"} Feb 02 09:46:47 crc kubenswrapper[4747]: I0202 09:46:47.246140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerStarted","Data":"42a36126e53616b58f7d5066f7d6d9b70cfb39303347545ce2578e12ff8313e2"} Feb 02 09:46:48 crc kubenswrapper[4747]: I0202 09:46:48.258138 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerStarted","Data":"00f08a302c2bb935331c1b45d6b1c994caac516912711dc65fb3e59aa3d42c1e"} Feb 02 09:46:49 crc 
kubenswrapper[4747]: I0202 09:46:49.269926 4747 generic.go:334] "Generic (PLEG): container finished" podID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerID="00f08a302c2bb935331c1b45d6b1c994caac516912711dc65fb3e59aa3d42c1e" exitCode=0 Feb 02 09:46:49 crc kubenswrapper[4747]: I0202 09:46:49.270045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerDied","Data":"00f08a302c2bb935331c1b45d6b1c994caac516912711dc65fb3e59aa3d42c1e"} Feb 02 09:46:51 crc kubenswrapper[4747]: I0202 09:46:51.288908 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerStarted","Data":"5087cb09d67073ab5543f67f69d64302e7dd6a1238c1b27d8b32dea88ad09714"} Feb 02 09:46:51 crc kubenswrapper[4747]: I0202 09:46:51.324290 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nl8qf" podStartSLOduration=9.427137687 podStartE2EDuration="12.324263957s" podCreationTimestamp="2026-02-02 09:46:39 +0000 UTC" firstStartedPulling="2026-02-02 09:46:47.248366227 +0000 UTC m=+3019.792704660" lastFinishedPulling="2026-02-02 09:46:50.145492497 +0000 UTC m=+3022.689830930" observedRunningTime="2026-02-02 09:46:51.316109774 +0000 UTC m=+3023.860448227" watchObservedRunningTime="2026-02-02 09:46:51.324263957 +0000 UTC m=+3023.868602400" Feb 02 09:46:55 crc kubenswrapper[4747]: I0202 09:46:55.339590 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:46:55 crc kubenswrapper[4747]: E0202 09:46:55.340270 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:46:59 crc kubenswrapper[4747]: I0202 09:46:59.829476 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:59 crc kubenswrapper[4747]: I0202 09:46:59.830088 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:46:59 crc kubenswrapper[4747]: I0202 09:46:59.882623 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:47:00 crc kubenswrapper[4747]: I0202 09:47:00.433899 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:47:00 crc kubenswrapper[4747]: I0202 09:47:00.485215 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:47:02 crc kubenswrapper[4747]: I0202 09:47:02.521172 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nl8qf" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="registry-server" containerID="cri-o://5087cb09d67073ab5543f67f69d64302e7dd6a1238c1b27d8b32dea88ad09714" gracePeriod=2 Feb 02 09:47:03 crc kubenswrapper[4747]: I0202 
09:47:03.533209 4747 generic.go:334] "Generic (PLEG): container finished" podID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerID="5087cb09d67073ab5543f67f69d64302e7dd6a1238c1b27d8b32dea88ad09714" exitCode=0 Feb 02 09:47:03 crc kubenswrapper[4747]: I0202 09:47:03.533255 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerDied","Data":"5087cb09d67073ab5543f67f69d64302e7dd6a1238c1b27d8b32dea88ad09714"} Feb 02 09:47:03 crc kubenswrapper[4747]: I0202 09:47:03.533727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nl8qf" event={"ID":"cd361de0-87e5-44af-895c-4fbcb9682a7d","Type":"ContainerDied","Data":"42a36126e53616b58f7d5066f7d6d9b70cfb39303347545ce2578e12ff8313e2"} Feb 02 09:47:03 crc kubenswrapper[4747]: I0202 09:47:03.533743 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42a36126e53616b58f7d5066f7d6d9b70cfb39303347545ce2578e12ff8313e2" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.081822 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.170885 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content\") pod \"cd361de0-87e5-44af-895c-4fbcb9682a7d\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.171090 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities\") pod \"cd361de0-87e5-44af-895c-4fbcb9682a7d\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.171234 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz8wm\" (UniqueName: \"kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm\") pod \"cd361de0-87e5-44af-895c-4fbcb9682a7d\" (UID: \"cd361de0-87e5-44af-895c-4fbcb9682a7d\") " Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.171627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities" (OuterVolumeSpecName: "utilities") pod "cd361de0-87e5-44af-895c-4fbcb9682a7d" (UID: "cd361de0-87e5-44af-895c-4fbcb9682a7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.171830 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.189114 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm" (OuterVolumeSpecName: "kube-api-access-qz8wm") pod "cd361de0-87e5-44af-895c-4fbcb9682a7d" (UID: "cd361de0-87e5-44af-895c-4fbcb9682a7d"). InnerVolumeSpecName "kube-api-access-qz8wm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.239140 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd361de0-87e5-44af-895c-4fbcb9682a7d" (UID: "cd361de0-87e5-44af-895c-4fbcb9682a7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.277278 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz8wm\" (UniqueName: \"kubernetes.io/projected/cd361de0-87e5-44af-895c-4fbcb9682a7d-kube-api-access-qz8wm\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.277324 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd361de0-87e5-44af-895c-4fbcb9682a7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.542205 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nl8qf" Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.580421 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:47:04 crc kubenswrapper[4747]: I0202 09:47:04.592599 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nl8qf"] Feb 02 09:47:06 crc kubenswrapper[4747]: I0202 09:47:06.339738 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:47:06 crc kubenswrapper[4747]: E0202 09:47:06.340460 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:47:06 crc kubenswrapper[4747]: I0202 09:47:06.354235 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" path="/var/lib/kubelet/pods/cd361de0-87e5-44af-895c-4fbcb9682a7d/volumes" Feb 02 09:47:18 crc kubenswrapper[4747]: I0202 09:47:18.346181 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:47:18 crc kubenswrapper[4747]: E0202 09:47:18.347084 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:47:28 crc kubenswrapper[4747]: I0202 09:47:28.756380 4747 generic.go:334] "Generic (PLEG): container finished" podID="cf092a65-7c03-4cb6-a248-823c7361cb1a" containerID="3464b314d23686421a0f33cc8edf0d29fafe35b64af23bef4ac2e4acf05a5f15" exitCode=0 Feb 02 09:47:28 crc kubenswrapper[4747]: I0202 09:47:28.756469 4747 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" event={"ID":"cf092a65-7c03-4cb6-a248-823c7361cb1a","Type":"ContainerDied","Data":"3464b314d23686421a0f33cc8edf0d29fafe35b64af23bef4ac2e4acf05a5f15"} Feb 02 09:47:29 crc kubenswrapper[4747]: I0202 09:47:29.877607 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:47:29 crc kubenswrapper[4747]: I0202 09:47:29.909737 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-qv6dx"] Feb 02 09:47:29 crc kubenswrapper[4747]: I0202 09:47:29.918422 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-qv6dx"] Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.013266 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs64d\" (UniqueName: \"kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d\") pod \"cf092a65-7c03-4cb6-a248-823c7361cb1a\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.013377 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host\") pod \"cf092a65-7c03-4cb6-a248-823c7361cb1a\" (UID: \"cf092a65-7c03-4cb6-a248-823c7361cb1a\") " Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.013428 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host" (OuterVolumeSpecName: "host") pod "cf092a65-7c03-4cb6-a248-823c7361cb1a" (UID: "cf092a65-7c03-4cb6-a248-823c7361cb1a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.013898 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cf092a65-7c03-4cb6-a248-823c7361cb1a-host\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.018481 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d" (OuterVolumeSpecName: "kube-api-access-xs64d") pod "cf092a65-7c03-4cb6-a248-823c7361cb1a" (UID: "cf092a65-7c03-4cb6-a248-823c7361cb1a"). InnerVolumeSpecName "kube-api-access-xs64d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.116184 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs64d\" (UniqueName: \"kubernetes.io/projected/cf092a65-7c03-4cb6-a248-823c7361cb1a-kube-api-access-xs64d\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.343609 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:47:30 crc kubenswrapper[4747]: E0202 09:47:30.343827 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.355345 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf092a65-7c03-4cb6-a248-823c7361cb1a" path="/var/lib/kubelet/pods/cf092a65-7c03-4cb6-a248-823c7361cb1a/volumes" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.776093 4747 scope.go:117] "RemoveContainer" containerID="3464b314d23686421a0f33cc8edf0d29fafe35b64af23bef4ac2e4acf05a5f15" Feb 02 09:47:30 crc kubenswrapper[4747]: I0202 09:47:30.776162 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-qv6dx" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.115720 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-rd2xh"] Feb 02 09:47:31 crc kubenswrapper[4747]: E0202 09:47:31.116333 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="extract-utilities" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116346 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="extract-utilities" Feb 02 09:47:31 crc kubenswrapper[4747]: E0202 09:47:31.116369 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="extract-content" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116375 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="extract-content" Feb 02 09:47:31 crc kubenswrapper[4747]: E0202 09:47:31.116395 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="registry-server" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116402 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="registry-server" Feb 02 09:47:31 crc kubenswrapper[4747]: E0202 09:47:31.116418 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf092a65-7c03-4cb6-a248-823c7361cb1a" containerName="container-00" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116424 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf092a65-7c03-4cb6-a248-823c7361cb1a" containerName="container-00" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116586 4747 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cd361de0-87e5-44af-895c-4fbcb9682a7d" containerName="registry-server" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.116600 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf092a65-7c03-4cb6-a248-823c7361cb1a" containerName="container-00" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.117188 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.120202 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xhpmv"/"default-dockercfg-d6z2n" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.239916 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjtvj\" (UniqueName: \"kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.240060 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.341127 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjtvj\" (UniqueName: \"kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.341209 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.341302 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.368963 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjtvj\" (UniqueName: \"kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj\") pod \"crc-debug-rd2xh\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.439208 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.794751 4747 generic.go:334] "Generic (PLEG): container finished" podID="e7d267a1-6928-4ad1-85af-5187456433c7" containerID="67fbb2403b49f18275eedd01debf921a947207a75ed8ce11b18115edb1f21daf" exitCode=0 Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.795119 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" event={"ID":"e7d267a1-6928-4ad1-85af-5187456433c7","Type":"ContainerDied","Data":"67fbb2403b49f18275eedd01debf921a947207a75ed8ce11b18115edb1f21daf"} Feb 02 09:47:31 crc kubenswrapper[4747]: I0202 09:47:31.795150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" event={"ID":"e7d267a1-6928-4ad1-85af-5187456433c7","Type":"ContainerStarted","Data":"da238bbf4bee2cfc4dbdda42b7687c34e3c9676221d933712cfed1ba0bc884fe"} Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.196639 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-rd2xh"] Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.206253 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-rd2xh"] Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.912102 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.970156 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjtvj\" (UniqueName: \"kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj\") pod \"e7d267a1-6928-4ad1-85af-5187456433c7\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.970231 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host\") pod \"e7d267a1-6928-4ad1-85af-5187456433c7\" (UID: \"e7d267a1-6928-4ad1-85af-5187456433c7\") " Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.970423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host" (OuterVolumeSpecName: "host") pod "e7d267a1-6928-4ad1-85af-5187456433c7" (UID: "e7d267a1-6928-4ad1-85af-5187456433c7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.970825 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e7d267a1-6928-4ad1-85af-5187456433c7-host\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:32 crc kubenswrapper[4747]: I0202 09:47:32.976263 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj" (OuterVolumeSpecName: "kube-api-access-sjtvj") pod "e7d267a1-6928-4ad1-85af-5187456433c7" (UID: "e7d267a1-6928-4ad1-85af-5187456433c7"). InnerVolumeSpecName "kube-api-access-sjtvj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.072605 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjtvj\" (UniqueName: \"kubernetes.io/projected/e7d267a1-6928-4ad1-85af-5187456433c7-kube-api-access-sjtvj\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.376142 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-w7bv5"] Feb 02 09:47:33 crc kubenswrapper[4747]: E0202 09:47:33.376501 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d267a1-6928-4ad1-85af-5187456433c7" containerName="container-00" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.376512 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d267a1-6928-4ad1-85af-5187456433c7" containerName="container-00" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.376692 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7d267a1-6928-4ad1-85af-5187456433c7" containerName="container-00" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.377367 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.479665 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8znp\" (UniqueName: \"kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.479756 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.581450 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8znp\" (UniqueName: \"kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.581855 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.582113 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.597770 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8znp\" (UniqueName: \"kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp\") pod \"crc-debug-w7bv5\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " 
pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.695095 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:33 crc kubenswrapper[4747]: W0202 09:47:33.738324 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7798af34_cba4_4584_bfd2_18369d05ef39.slice/crio-ed6062bd3db43aeb819e035b94ecd1ab2cdb0ff7cb198e75007caedd1dc6e39f WatchSource:0}: Error finding container ed6062bd3db43aeb819e035b94ecd1ab2cdb0ff7cb198e75007caedd1dc6e39f: Status 404 returned error can't find the container with id ed6062bd3db43aeb819e035b94ecd1ab2cdb0ff7cb198e75007caedd1dc6e39f Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.814872 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-rd2xh" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.815005 4747 scope.go:117] "RemoveContainer" containerID="67fbb2403b49f18275eedd01debf921a947207a75ed8ce11b18115edb1f21daf" Feb 02 09:47:33 crc kubenswrapper[4747]: I0202 09:47:33.825197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" event={"ID":"7798af34-cba4-4584-bfd2-18369d05ef39","Type":"ContainerStarted","Data":"ed6062bd3db43aeb819e035b94ecd1ab2cdb0ff7cb198e75007caedd1dc6e39f"} Feb 02 09:47:34 crc kubenswrapper[4747]: I0202 09:47:34.360241 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7d267a1-6928-4ad1-85af-5187456433c7" path="/var/lib/kubelet/pods/e7d267a1-6928-4ad1-85af-5187456433c7/volumes" Feb 02 09:47:34 crc kubenswrapper[4747]: I0202 09:47:34.834300 4747 generic.go:334] "Generic (PLEG): container finished" podID="7798af34-cba4-4584-bfd2-18369d05ef39" containerID="c2480b2e2f89186924e52a4006e4cca62cb4c2e4e38c4264f495b877011b9859" exitCode=0 Feb 02 09:47:34 crc kubenswrapper[4747]: I0202 09:47:34.834380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" event={"ID":"7798af34-cba4-4584-bfd2-18369d05ef39","Type":"ContainerDied","Data":"c2480b2e2f89186924e52a4006e4cca62cb4c2e4e38c4264f495b877011b9859"} Feb 02 09:47:34 crc kubenswrapper[4747]: I0202 09:47:34.873943 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-w7bv5"] Feb 02 09:47:34 crc kubenswrapper[4747]: I0202 09:47:34.882159 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xhpmv/crc-debug-w7bv5"] Feb 02 09:47:35 crc kubenswrapper[4747]: I0202 09:47:35.956955 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.029768 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8znp\" (UniqueName: \"kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp\") pod \"7798af34-cba4-4584-bfd2-18369d05ef39\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.029817 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host\") pod \"7798af34-cba4-4584-bfd2-18369d05ef39\" (UID: \"7798af34-cba4-4584-bfd2-18369d05ef39\") " Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.030004 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host" (OuterVolumeSpecName: "host") pod "7798af34-cba4-4584-bfd2-18369d05ef39" (UID: "7798af34-cba4-4584-bfd2-18369d05ef39"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.030415 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7798af34-cba4-4584-bfd2-18369d05ef39-host\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.035503 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp" (OuterVolumeSpecName: "kube-api-access-w8znp") pod "7798af34-cba4-4584-bfd2-18369d05ef39" (UID: "7798af34-cba4-4584-bfd2-18369d05ef39"). InnerVolumeSpecName "kube-api-access-w8znp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.132037 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8znp\" (UniqueName: \"kubernetes.io/projected/7798af34-cba4-4584-bfd2-18369d05ef39-kube-api-access-w8znp\") on node \"crc\" DevicePath \"\"" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.349411 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7798af34-cba4-4584-bfd2-18369d05ef39" path="/var/lib/kubelet/pods/7798af34-cba4-4584-bfd2-18369d05ef39/volumes" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.852808 4747 scope.go:117] "RemoveContainer" containerID="c2480b2e2f89186924e52a4006e4cca62cb4c2e4e38c4264f495b877011b9859" Feb 02 09:47:36 crc kubenswrapper[4747]: I0202 09:47:36.852868 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/crc-debug-w7bv5" Feb 02 09:47:44 crc kubenswrapper[4747]: I0202 09:47:44.339679 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:47:44 crc kubenswrapper[4747]: E0202 09:47:44.340513 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.395292 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6cf448654b-n4qqr_8f344eda-fa92-4465-9749-057b27fc8741/barbican-api/0.log" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.578064 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6cf448654b-n4qqr_8f344eda-fa92-4465-9749-057b27fc8741/barbican-api-log/0.log" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.611630 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-569dcfffb-qv7m8_e2cbf327-44a0-4a40-8bf5-ef350dba55b7/barbican-keystone-listener/0.log" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.673460 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-569dcfffb-qv7m8_e2cbf327-44a0-4a40-8bf5-ef350dba55b7/barbican-keystone-listener-log/0.log" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.848196 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bd47ccdc7-8zwk7_7ccfda3d-1736-4cb1-a059-3e5508f95148/barbican-worker-log/0.log" Feb 02 09:47:50 crc kubenswrapper[4747]: I0202 09:47:50.878541 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bd47ccdc7-8zwk7_7ccfda3d-1736-4cb1-a059-3e5508f95148/barbican-worker/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.056620 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-zth7t_5f3a380a-d55f-4522-a962-4003519edb27/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.079863 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_aaac5a6d-0200-4405-bf3f-d9e46177cc05/ceilometer-central-agent/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.146589 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_aaac5a6d-0200-4405-bf3f-d9e46177cc05/ceilometer-notification-agent/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.246149 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_aaac5a6d-0200-4405-bf3f-d9e46177cc05/proxy-httpd/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.323196 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_aaac5a6d-0200-4405-bf3f-d9e46177cc05/sg-core/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.333054 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_28634e0e-24a0-41f6-84ad-ffea557b14ed/cinder-api/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 
09:47:51.462693 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_28634e0e-24a0-41f6-84ad-ffea557b14ed/cinder-api-log/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.568400 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bfd38cbd-124d-4f9a-9b9b-d724b277fbcb/probe/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.617468 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bfd38cbd-124d-4f9a-9b9b-d724b277fbcb/cinder-scheduler/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.779880 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-phpk2_b9b9686a-9848-43fb-90fd-2986e723d282/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.815341 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z2m9h_4d06633a-c9ea-4ae5-a60e-febcf39cead2/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:51 crc kubenswrapper[4747]: I0202 09:47:51.969796 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-qdz6h_98cae643-025f-440f-8c6b-84d9589ab11c/init/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.145643 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-qdz6h_98cae643-025f-440f-8c6b-84d9589ab11c/init/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.157709 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-qdz6h_98cae643-025f-440f-8c6b-84d9589ab11c/dnsmasq-dns/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.190139 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tql4j_b18e7fd9-2c50-4d27-b7ab-c525aa31a768/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.357917 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bba7d405-55c1-4fd1-91ba-d1a235f09160/glance-httpd/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.365444 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bba7d405-55c1-4fd1-91ba-d1a235f09160/glance-log/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.532181 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d6b158e0-57fd-41f8-8d2d-462d6da18ab0/glance-log/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.545828 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d6b158e0-57fd-41f8-8d2d-462d6da18ab0/glance-httpd/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.713669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7495bf65bd-857k2_2860dde6-602b-417e-9819-6ce526ed2eb9/horizon/0.log" Feb 02 09:47:52 crc kubenswrapper[4747]: I0202 09:47:52.811599 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-vn6w9_fb2b09c4-09a2-44d5-8232-5b3e25921596/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 
09:47:53.047574 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-w5442_5eba9605-8814-46f8-be9d-5a931e56c782/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.054199 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7495bf65bd-857k2_2860dde6-602b-417e-9819-6ce526ed2eb9/horizon-log/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.244654 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_634a083b-144a-4785-b8d2-aa33c1b9c423/kube-state-metrics/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.305656 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-559c565ccd-md2lx_693c0cdd-1115-4a68-9f3e-4437ce703788/keystone-api/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.449266 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-zjqqc_5e4244f7-511f-4193-b74d-6d018e944b45/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.811078 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7fb8f585c-hlmn8_884e89eb-4a79-4086-8e3c-bc521ff7db35/neutron-httpd/0.log" Feb 02 09:47:53 crc kubenswrapper[4747]: I0202 09:47:53.862774 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7fb8f585c-hlmn8_884e89eb-4a79-4086-8e3c-bc521ff7db35/neutron-api/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.023542 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-94z5c_0238bbca-ba9a-4e80-bdc3-1fc0467c30c8/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.489307 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4be5b7d2-85d1-4f15-bcc3-991a97f5fcec/nova-cell0-conductor-conductor/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.548506 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3909e3d2-a56c-4a5e-ac77-d65f07d595de/nova-api-log/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.628552 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3909e3d2-a56c-4a5e-ac77-d65f07d595de/nova-api-api/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.826314 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ec1fef68-e209-465e-a237-cce8f03abf20/nova-cell1-conductor-conductor/0.log" Feb 02 09:47:54 crc kubenswrapper[4747]: I0202 09:47:54.855009 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_329eee3d-3827-486f-bd32-f72c288610a2/nova-cell1-novncproxy-novncproxy/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 09:47:55.109405 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-fpkts_a072b9d8-fc03-4e5f-9470-458501cb4a01/nova-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 09:47:55.172224 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d6c1b359-f618-48a3-9ef9-f13da6cc85a9/nova-metadata-log/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 
09:47:55.483981 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5053c6b4-d732-46f3-a4ee-29fad06b06fb/nova-scheduler-scheduler/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 09:47:55.579876 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_03897c48-cfa1-4875-bd93-7b645923f47e/mysql-bootstrap/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 09:47:55.784418 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_03897c48-cfa1-4875-bd93-7b645923f47e/galera/0.log" Feb 02 09:47:55 crc kubenswrapper[4747]: I0202 09:47:55.794947 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_03897c48-cfa1-4875-bd93-7b645923f47e/mysql-bootstrap/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.014736 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3089b6b4-c4d3-4717-a7d7-159dd27863ac/mysql-bootstrap/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.163630 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d6c1b359-f618-48a3-9ef9-f13da6cc85a9/nova-metadata-metadata/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.234881 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3089b6b4-c4d3-4717-a7d7-159dd27863ac/galera/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.260976 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3089b6b4-c4d3-4717-a7d7-159dd27863ac/mysql-bootstrap/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.339469 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:47:56 crc kubenswrapper[4747]: E0202 09:47:56.339826 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.449526 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_54560832-e0ee-4493-a567-b4a3e7ca4e8f/openstackclient/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.604997 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-cqqzs_a214831a-017f-45e4-9040-e9d7c8db06f7/openstack-network-exporter/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.701229 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bfdw2_de0ee51c-e1b1-4614-83bb-07a2d682694b/ovsdb-server-init/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.884571 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bfdw2_de0ee51c-e1b1-4614-83bb-07a2d682694b/ovsdb-server-init/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.940330 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bfdw2_de0ee51c-e1b1-4614-83bb-07a2d682694b/ovs-vswitchd/0.log" Feb 02 09:47:56 crc kubenswrapper[4747]: I0202 09:47:56.951923 4747 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bfdw2_de0ee51c-e1b1-4614-83bb-07a2d682694b/ovsdb-server/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.122696 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-vhb5g_6054cae9-07d3-4de6-ad28-2be1334c85c5/ovn-controller/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.198449 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-6xm8j_2c161d1a-b3d3-4679-b869-fd3cafa040c8/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.325316 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_29b3b6f8-cfec-49c7-aca0-37647aacc62b/openstack-network-exporter/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.413028 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_29b3b6f8-cfec-49c7-aca0-37647aacc62b/ovn-northd/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.461081 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_59c1604c-b4d8-4717-a68d-e372953a8a3f/openstack-network-exporter/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.580753 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_59c1604c-b4d8-4717-a68d-e372953a8a3f/ovsdbserver-nb/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.669840 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_75232e08-a5a1-4971-893e-24c3503ff693/openstack-network-exporter/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.714473 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_75232e08-a5a1-4971-893e-24c3503ff693/ovsdbserver-sb/0.log" Feb 02 09:47:57 crc kubenswrapper[4747]: I0202 09:47:57.939662 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75b94989dd-wzr2t_10126fda-de55-4027-9446-86789ba2852f/placement-api/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.025186 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-75b94989dd-wzr2t_10126fda-de55-4027-9446-86789ba2852f/placement-log/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.121669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7eee7ec2-61e8-40b8-86c6-618d811a6b58/setup-container/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.293227 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7eee7ec2-61e8-40b8-86c6-618d811a6b58/setup-container/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.372674 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7eee7ec2-61e8-40b8-86c6-618d811a6b58/rabbitmq/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.405840 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff8e9063-bf18-45e0-92ef-81bc7eee9d50/setup-container/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.587202 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff8e9063-bf18-45e0-92ef-81bc7eee9d50/rabbitmq/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.601465 4747 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff8e9063-bf18-45e0-92ef-81bc7eee9d50/setup-container/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.690980 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-x8fbt_8cc0ed13-8498-4a1b-9728-47c6accd7128/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:58 crc kubenswrapper[4747]: I0202 09:47:58.774490 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-jwfdr_cde86100-fdda-4f1b-b549-b87b483e3859/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.055710 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-59zpr_f572a805-a83f-44fa-a82c-dcbd3b154be6/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.236432 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bmfbf_17233d25-c081-446c-a7a9-2967a227c731/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.304137 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-72dfk_1e97ae5d-7c9d-423b-8fbb-c00644f23335/ssh-known-hosts-edpm-deployment/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.581526 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-67d8547547-ntqwl_c6682738-3d59-4dee-aca4-df90ecf8179e/proxy-httpd/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.608331 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-67d8547547-ntqwl_c6682738-3d59-4dee-aca4-df90ecf8179e/proxy-server/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.610678 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-tzwjl_662dc11b-2635-4ef2-a109-900fa5c109fb/swift-ring-rebalance/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.800265 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/account-auditor/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.821753 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/account-replicator/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.842181 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/account-reaper/0.log" Feb 02 09:47:59 crc kubenswrapper[4747]: I0202 09:47:59.992137 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/account-server/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.038598 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/container-auditor/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.088604 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/container-server/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.120756 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/container-replicator/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.234349 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/container-updater/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.257776 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/object-auditor/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.333636 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/object-expirer/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.345680 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/object-replicator/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.443004 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/object-server/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.453346 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/object-updater/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.557822 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/rsync/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.571138 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30a3d44f-2ad1-4d00-824e-1e1cdaa048ad/swift-recon-cron/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.727921 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-cklb6_efbfdb02-1bdd-471d-9054-a59de7b96f4c/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:48:00 crc kubenswrapper[4747]: I0202 09:48:00.810448 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_84239355-717c-437f-abf5-b5df1b3a0806/tempest-tests-tempest-tests-runner/0.log" Feb 02 09:48:01 crc kubenswrapper[4747]: I0202 09:48:01.012588 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_5c1cf2d0-f9fb-494c-908f-5ccabec6364a/test-operator-logs-container/0.log" Feb 02 09:48:01 crc kubenswrapper[4747]: I0202 09:48:01.076526 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-gxqzp_464e38ca-20bd-44ab-80de-f991f6bb7909/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 09:48:07 crc kubenswrapper[4747]: I0202 09:48:07.805692 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b139661f-2ea9-4fdd-bb8b-a48d41c0b3cf/memcached/0.log" Feb 02 09:48:09 crc kubenswrapper[4747]: I0202 09:48:09.339677 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:48:09 crc kubenswrapper[4747]: E0202 09:48:09.340207 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:48:20 crc kubenswrapper[4747]: I0202 09:48:20.344631 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:48:20 crc kubenswrapper[4747]: E0202 09:48:20.345375 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:48:25 crc kubenswrapper[4747]: I0202 09:48:25.607251 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-zqj92_db5ae332-d8f3-41a2-9c21-45ff5536cbb8/manager/0.log" Feb 02 09:48:25 crc kubenswrapper[4747]: I0202 09:48:25.728898 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-7kw8z_4923ca42-a876-42cd-b992-21573dde4361/manager/0.log" Feb 02 09:48:25 crc kubenswrapper[4747]: I0202 09:48:25.821957 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-gg5lg_da38d39a-f91a-42d4-a773-5fd894e74305/manager/0.log" Feb 02 09:48:25 crc kubenswrapper[4747]: I0202 09:48:25.933487 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/util/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.127432 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/util/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.127806 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/pull/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.183348 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/pull/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.391003 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/pull/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.406367 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/util/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.418036 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e04435823cb0c0fc9296188562163e0e81bbaeb52a92fa7afd8baaba34tdtx2_ce418e47-dd96-4628-b622-6083b3955e7b/extract/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.633568 4747 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-hqm5n_38cdc2d7-bf0e-499c-9953-5f3088714675/manager/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.648105 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-g5qsx_d2a8dd02-a258-40be-ae2b-8c4d8f093870/manager/0.log" Feb 02 09:48:26 crc kubenswrapper[4747]: I0202 09:48:26.849880 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-nmsdj_c01d1e85-b676-404c-8565-900de1d7b9ff/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.053339 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-qlc6w_9069468d-21ec-4ca1-8c03-e35555180a9a/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.215267 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-rtv8v_e7700f72-6bbd-4ba3-9835-3665f7b1df89/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.331617 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-2rmn5_1c1d1d29-d3e3-4569-a8ef-f68a5dee0242/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.411853 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-lqfh2_2343f1c6-2d43-468e-a2ea-7e6b2b915cec/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.484339 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-6fx7k_e7c8163b-0e29-4cdc-a5d0-5d139e47f1e3/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.680875 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-66cdn_200b1a45-bbca-460b-a578-2c913f0075f9/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.778554 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-hhlsd_2cddc38a-7d58-47ec-a296-b0447a8b67c4/manager/0.log" Feb 02 09:48:27 crc kubenswrapper[4747]: I0202 09:48:27.935584 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-ccljn_2def646c-f3ea-46d4-9003-ea05abd176d7/manager/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.070197 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4drf4zb_b9cdd96d-49df-489b-9e07-0529338f4b78/manager/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.245602 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-5b57c84fd5-pljbc_8ced9c16-9a58-4416-aadc-23d33fbd8c2c/operator/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.488254 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2k92r_b2cdcde8-4fea-4642-954f-eb13afd581f7/registry-server/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.657986 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-5gqtj_cbb834cd-ddc7-462f-8b67-d8466ce5f53e/manager/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.812765 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-fx5wk_b90291c1-9921-431d-b77d-4196cb5219df/manager/0.log" Feb 02 09:48:28 crc kubenswrapper[4747]: I0202 09:48:28.944826 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-jrklv_c425595e-72b6-48f6-91fc-0469ac7a634e/operator/0.log" Feb 02 09:48:29 crc kubenswrapper[4747]: I0202 09:48:29.210342 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-np7nt_409bac45-bf99-4268-aa3c-19e9d7392932/manager/0.log" Feb 02 09:48:29 crc kubenswrapper[4747]: I0202 09:48:29.322685 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-75d6c7dbc6-wphwt_1c82e754-744c-49e4-9ec9-3d8dada42adf/manager/0.log" Feb 02 09:48:29 crc kubenswrapper[4747]: I0202 09:48:29.404684 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-64b5b76f97-sp2rh_ebea840f-6b28-4ed1-8483-084f00350673/manager/0.log" Feb 02 09:48:29 crc kubenswrapper[4747]: I0202 09:48:29.511299 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-qf4mt_cfd44a3c-5745-48af-b70c-86402a61492e/manager/0.log" Feb 02 09:48:29 crc kubenswrapper[4747]: I0202 09:48:29.632553 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-qpfgd_0fcec9a4-9a0c-4013-a44b-2cd47a35e3ae/manager/0.log" Feb 02 09:48:35 crc kubenswrapper[4747]: I0202 09:48:35.340806 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:48:35 crc kubenswrapper[4747]: E0202 09:48:35.341421 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:48:47 crc kubenswrapper[4747]: I0202 09:48:47.339686 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:48:47 crc kubenswrapper[4747]: E0202 09:48:47.340395 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:48:48 crc kubenswrapper[4747]: I0202 09:48:48.798556 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-tmxxh_4f791e66-ece7-47e4-a9bb-71c940fc336d/control-plane-machine-set-operator/0.log" Feb 02 09:48:49 crc 
kubenswrapper[4747]: I0202 09:48:49.143127 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-xm7wg_c63a52e2-6fdd-4b79-b054-669bcc611dcb/machine-api-operator/0.log" Feb 02 09:48:49 crc kubenswrapper[4747]: I0202 09:48:49.178153 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-xm7wg_c63a52e2-6fdd-4b79-b054-669bcc611dcb/kube-rbac-proxy/0.log" Feb 02 09:49:01 crc kubenswrapper[4747]: I0202 09:49:01.364422 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:49:01 crc kubenswrapper[4747]: E0202 09:49:01.374709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:49:02 crc kubenswrapper[4747]: I0202 09:49:02.270272 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-lgx2r_7a38108b-02cd-4489-88df-5b532a16b031/cert-manager-controller/0.log" Feb 02 09:49:02 crc kubenswrapper[4747]: I0202 09:49:02.462423 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-64kvl_71ba9c61-d5e8-4688-8b1b-3eaaa5f2d6a7/cert-manager-cainjector/0.log" Feb 02 09:49:02 crc kubenswrapper[4747]: I0202 09:49:02.481899 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-lrtlk_918034e7-cc57-4704-9891-5c2405668e2e/cert-manager-webhook/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.263503 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-5zpv9_8efb24a8-e59e-47da-a5eb-253fd3b215c1/nmstate-console-plugin/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.343083 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:49:14 crc kubenswrapper[4747]: E0202 09:49:14.343372 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.483113 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-dmcw9_273a950f-b0c3-4f52-be28-cae9de106aaf/nmstate-handler/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.555602 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-8zx7l_622130de-c2ad-4b1f-90bc-78f0173d2fe2/kube-rbac-proxy/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.590427 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-8zx7l_622130de-c2ad-4b1f-90bc-78f0173d2fe2/nmstate-metrics/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 
09:49:14.714202 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-l87fs_2d26069a-13b2-4f8a-ba4b-a25bfd428db3/nmstate-operator/0.log" Feb 02 09:49:14 crc kubenswrapper[4747]: I0202 09:49:14.785815 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-8t7kj_e354bf65-f7c8-4fab-a288-ff0ffb879c62/nmstate-webhook/0.log" Feb 02 09:49:29 crc kubenswrapper[4747]: I0202 09:49:29.340160 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:49:29 crc kubenswrapper[4747]: E0202 09:49:29.341148 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.244752 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-mmkc8_cae8a10b-ff3b-42ac-b7a3-326f049a49ba/kube-rbac-proxy/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.345194 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-mmkc8_cae8a10b-ff3b-42ac-b7a3-326f049a49ba/controller/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.470824 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-ms2gx_4b0b5a28-658a-4507-a310-e983c5ef57db/frr-k8s-webhook-server/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.566198 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-frr-files/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.762777 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-reloader/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.781147 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-metrics/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.781342 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-frr-files/0.log" Feb 02 09:49:40 crc kubenswrapper[4747]: I0202 09:49:40.848091 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-reloader/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.045954 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-metrics/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.051344 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-reloader/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.071222 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-frr-files/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 
09:49:41.086148 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-metrics/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.246618 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-reloader/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.247273 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-frr-files/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.269379 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/cp-metrics/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.287129 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/controller/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.463000 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/frr-metrics/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.504597 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/kube-rbac-proxy/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.547339 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/kube-rbac-proxy-frr/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.729104 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/reloader/0.log" Feb 02 09:49:41 crc kubenswrapper[4747]: I0202 09:49:41.851259 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6bf46c8785-7g5mj_cf0b4df2-65a8-4cfa-a77f-a52634ce2b49/manager/0.log" Feb 02 09:49:42 crc kubenswrapper[4747]: I0202 09:49:42.002204 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6d99b6746d-8rlqn_ba335bdb-560d-4051-8f28-89dab7f4f9cb/webhook-server/0.log" Feb 02 09:49:42 crc kubenswrapper[4747]: I0202 09:49:42.263649 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dxs77_ec6bb1cb-4fbe-477e-8635-42c2f087c3d4/kube-rbac-proxy/0.log" Feb 02 09:49:42 crc kubenswrapper[4747]: I0202 09:49:42.750532 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dxs77_ec6bb1cb-4fbe-477e-8635-42c2f087c3d4/speaker/0.log" Feb 02 09:49:42 crc kubenswrapper[4747]: I0202 09:49:42.869005 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-xrd4k_28a1eb86-8f46-4fbb-9f01-3f5e20b7bc7f/frr/0.log" Feb 02 09:49:43 crc kubenswrapper[4747]: I0202 09:49:43.339142 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:49:43 crc kubenswrapper[4747]: E0202 09:49:43.339486 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:49:54 crc kubenswrapper[4747]: I0202 09:49:54.340106 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:49:54 crc kubenswrapper[4747]: E0202 09:49:54.341814 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:49:54 crc kubenswrapper[4747]: I0202 09:49:54.628481 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/util/0.log" Feb 02 09:49:54 crc kubenswrapper[4747]: I0202 09:49:54.892517 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/util/0.log" Feb 02 09:49:54 crc kubenswrapper[4747]: I0202 09:49:54.908827 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/pull/0.log" Feb 02 09:49:54 crc kubenswrapper[4747]: I0202 09:49:54.923722 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/pull/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.067956 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/pull/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.138904 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/extract/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.140769 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dccv7z8_f74f35ee-5609-4669-a870-a67f99347446/util/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.274728 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/util/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.465620 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/pull/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.474003 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/util/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.509119 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/pull/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.658331 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/util/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.662464 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/pull/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.701330 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713wktwh_7cdc4e0f-2a37-4329-82df-e8a06f3d50db/extract/0.log" Feb 02 09:49:55 crc kubenswrapper[4747]: I0202 09:49:55.843698 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.034426 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.051637 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-content/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.074507 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-content/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.226880 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.238995 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/extract-content/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.434122 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.660920 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-content/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.673117 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.674706 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bxvq_a977f70f-0d8f-4480-be1f-0b48d191b054/registry-server/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.710951 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-content/0.log" Feb 02 09:49:56 
crc kubenswrapper[4747]: I0202 09:49:56.865095 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-utilities/0.log" Feb 02 09:49:56 crc kubenswrapper[4747]: I0202 09:49:56.882156 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/extract-content/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.076187 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-xs49m_0f85f49d-a5d1-4b38-965c-e02d64134491/marketplace-operator/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.195567 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-utilities/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.378984 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-utilities/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.396505 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-content/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.424075 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-content/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.426836 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8kk9w_72a35cfa-73ba-4baf-b7c0-2947ca69a797/registry-server/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.549121 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-utilities/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.581369 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/extract-content/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.719149 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xhhvr_d487e4d8-ed4a-4adf-b849-70df6155b1e4/registry-server/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.739539 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-utilities/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.922889 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-content/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.938213 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-utilities/0.log" Feb 02 09:49:57 crc kubenswrapper[4747]: I0202 09:49:57.955468 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-content/0.log" Feb 02 09:49:58 crc 
kubenswrapper[4747]: I0202 09:49:58.124813 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-utilities/0.log" Feb 02 09:49:58 crc kubenswrapper[4747]: I0202 09:49:58.139490 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/extract-content/0.log" Feb 02 09:49:58 crc kubenswrapper[4747]: I0202 09:49:58.498945 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vd55r_0a8d14ff-b532-41f9-bfdb-b6cda9a16e5a/registry-server/0.log" Feb 02 09:50:09 crc kubenswrapper[4747]: I0202 09:50:09.339879 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:50:09 crc kubenswrapper[4747]: E0202 09:50:09.340705 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:50:20 crc kubenswrapper[4747]: I0202 09:50:20.343230 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:50:20 crc kubenswrapper[4747]: E0202 09:50:20.343983 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g8f8b_openshift-machine-config-operator(1fadfd97-1567-40c1-a5e7-98ed7e3d67d6)\"" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.803254 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:29 crc kubenswrapper[4747]: E0202 09:50:29.804280 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7798af34-cba4-4584-bfd2-18369d05ef39" containerName="container-00" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.804299 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7798af34-cba4-4584-bfd2-18369d05ef39" containerName="container-00" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.804541 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7798af34-cba4-4584-bfd2-18369d05ef39" containerName="container-00" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.805975 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.829105 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.901264 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.901344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:29 crc kubenswrapper[4747]: I0202 09:50:29.901496 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbtbs\" (UniqueName: \"kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.003523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.003590 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.003651 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbtbs\" (UniqueName: \"kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.004199 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.004253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.024098 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cbtbs\" (UniqueName: \"kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs\") pod \"certified-operators-84jtf\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.167438 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:30 crc kubenswrapper[4747]: I0202 09:50:30.699068 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:31 crc kubenswrapper[4747]: I0202 09:50:31.186328 4747 generic.go:334] "Generic (PLEG): container finished" podID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerID="da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10" exitCode=0 Feb 02 09:50:31 crc kubenswrapper[4747]: I0202 09:50:31.186623 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerDied","Data":"da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10"} Feb 02 09:50:31 crc kubenswrapper[4747]: I0202 09:50:31.186671 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerStarted","Data":"81f1d422ea98b7ef7f0131126948e64488194a6bda67bdd0aa3420baec4836b1"} Feb 02 09:50:31 crc kubenswrapper[4747]: I0202 09:50:31.193731 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 09:50:31 crc kubenswrapper[4747]: I0202 09:50:31.339891 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:50:32 crc kubenswrapper[4747]: I0202 09:50:32.220809 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"8f5ada8c177a83d2798fbbb06dadafaceecb253c3f3c11066434fdcda3a262b6"} Feb 02 09:50:32 crc kubenswrapper[4747]: I0202 09:50:32.230036 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerStarted","Data":"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805"} Feb 02 09:50:33 crc kubenswrapper[4747]: I0202 09:50:33.240676 4747 generic.go:334] "Generic (PLEG): container finished" podID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerID="e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805" exitCode=0 Feb 02 09:50:33 crc kubenswrapper[4747]: I0202 09:50:33.240741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerDied","Data":"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805"} Feb 02 09:50:34 crc kubenswrapper[4747]: I0202 09:50:34.254619 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerStarted","Data":"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe"} Feb 02 09:50:34 crc kubenswrapper[4747]: I0202 09:50:34.280526 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-84jtf" podStartSLOduration=2.817983203 podStartE2EDuration="5.280505377s" podCreationTimestamp="2026-02-02 09:50:29 +0000 UTC" firstStartedPulling="2026-02-02 09:50:31.190819718 +0000 UTC m=+3243.735158161" lastFinishedPulling="2026-02-02 09:50:33.653341902 +0000 UTC m=+3246.197680335" observedRunningTime="2026-02-02 09:50:34.273064911 +0000 UTC m=+3246.817403354" watchObservedRunningTime="2026-02-02 09:50:34.280505377 +0000 UTC m=+3246.824843820" Feb 02 09:50:40 crc kubenswrapper[4747]: I0202 09:50:40.167625 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:40 crc kubenswrapper[4747]: I0202 09:50:40.168065 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:40 crc kubenswrapper[4747]: I0202 09:50:40.240367 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:40 crc kubenswrapper[4747]: I0202 09:50:40.355121 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:40 crc kubenswrapper[4747]: I0202 09:50:40.484386 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.320720 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-84jtf" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="registry-server" containerID="cri-o://56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe" gracePeriod=2 Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.841767 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.956395 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities\") pod \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.956487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content\") pod \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.956675 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbtbs\" (UniqueName: \"kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs\") pod \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\" (UID: \"e75b9697-9ead-49fe-b5e0-b1d2a001129a\") " Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.957446 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities" (OuterVolumeSpecName: "utilities") pod "e75b9697-9ead-49fe-b5e0-b1d2a001129a" (UID: "e75b9697-9ead-49fe-b5e0-b1d2a001129a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:50:42 crc kubenswrapper[4747]: I0202 09:50:42.964075 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs" (OuterVolumeSpecName: "kube-api-access-cbtbs") pod "e75b9697-9ead-49fe-b5e0-b1d2a001129a" (UID: "e75b9697-9ead-49fe-b5e0-b1d2a001129a"). InnerVolumeSpecName "kube-api-access-cbtbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.009221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e75b9697-9ead-49fe-b5e0-b1d2a001129a" (UID: "e75b9697-9ead-49fe-b5e0-b1d2a001129a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.058994 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbtbs\" (UniqueName: \"kubernetes.io/projected/e75b9697-9ead-49fe-b5e0-b1d2a001129a-kube-api-access-cbtbs\") on node \"crc\" DevicePath \"\"" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.059034 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.059048 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e75b9697-9ead-49fe-b5e0-b1d2a001129a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.331483 4747 generic.go:334] "Generic (PLEG): container finished" podID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerID="56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe" exitCode=0 Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.331525 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerDied","Data":"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe"} Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.331557 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84jtf" event={"ID":"e75b9697-9ead-49fe-b5e0-b1d2a001129a","Type":"ContainerDied","Data":"81f1d422ea98b7ef7f0131126948e64488194a6bda67bdd0aa3420baec4836b1"} Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.331586 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84jtf" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.331591 4747 scope.go:117] "RemoveContainer" containerID="56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.364600 4747 scope.go:117] "RemoveContainer" containerID="e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.383642 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.392595 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-84jtf"] Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.413771 4747 scope.go:117] "RemoveContainer" containerID="da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.441663 4747 scope.go:117] "RemoveContainer" containerID="56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe" Feb 02 09:50:43 crc kubenswrapper[4747]: E0202 09:50:43.442529 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe\": container with ID starting with 56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe not found: ID does not exist" containerID="56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.442662 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe"} err="failed to get container status \"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe\": rpc error: code = NotFound desc = could not find container \"56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe\": container with ID starting with 56fe50cd639dde46577df0b434ac02316569ca946b9b8bb770bfc4554cbd8ebe not found: ID does not exist" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.442706 4747 scope.go:117] "RemoveContainer" containerID="e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805" Feb 02 09:50:43 crc kubenswrapper[4747]: E0202 09:50:43.443192 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805\": container with ID starting with e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805 not found: ID does not exist" containerID="e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.443233 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805"} err="failed to get container status \"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805\": rpc error: code = NotFound desc = could not find container \"e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805\": container with ID starting with e6cf960efd241c040199599bb278ba7c7282b12708c94616d7549d7869a87805 not found: ID does not exist" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.443263 4747 scope.go:117] "RemoveContainer" 
containerID="da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10" Feb 02 09:50:43 crc kubenswrapper[4747]: E0202 09:50:43.443538 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10\": container with ID starting with da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10 not found: ID does not exist" containerID="da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10" Feb 02 09:50:43 crc kubenswrapper[4747]: I0202 09:50:43.443568 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10"} err="failed to get container status \"da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10\": rpc error: code = NotFound desc = could not find container \"da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10\": container with ID starting with da97bd10f930f3bf61aa77183019e8d30b2748504cb65705a71eff11cff2ae10 not found: ID does not exist" Feb 02 09:50:44 crc kubenswrapper[4747]: I0202 09:50:44.351888 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" path="/var/lib/kubelet/pods/e75b9697-9ead-49fe-b5e0-b1d2a001129a/volumes" Feb 02 09:51:40 crc kubenswrapper[4747]: I0202 09:51:40.870728 4747 generic.go:334] "Generic (PLEG): container finished" podID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerID="65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b" exitCode=0 Feb 02 09:51:40 crc kubenswrapper[4747]: I0202 09:51:40.870845 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhpmv/must-gather-srbbm" event={"ID":"894bd1e5-3142-4804-8cb1-1afc854a92f1","Type":"ContainerDied","Data":"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b"} Feb 02 09:51:40 crc kubenswrapper[4747]: I0202 09:51:40.872319 4747 scope.go:117] "RemoveContainer" containerID="65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b" Feb 02 09:51:41 crc kubenswrapper[4747]: I0202 09:51:41.560474 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xhpmv_must-gather-srbbm_894bd1e5-3142-4804-8cb1-1afc854a92f1/gather/0.log" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.394117 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xhpmv/must-gather-srbbm"] Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.394933 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xhpmv/must-gather-srbbm" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="copy" containerID="cri-o://9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c" gracePeriod=2 Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.403708 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xhpmv/must-gather-srbbm"] Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.915322 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xhpmv_must-gather-srbbm_894bd1e5-3142-4804-8cb1-1afc854a92f1/copy/0.log" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.916103 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.955702 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xhpmv_must-gather-srbbm_894bd1e5-3142-4804-8cb1-1afc854a92f1/copy/0.log" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.956234 4747 generic.go:334] "Generic (PLEG): container finished" podID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerID="9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c" exitCode=143 Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.956278 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhpmv/must-gather-srbbm" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.956300 4747 scope.go:117] "RemoveContainer" containerID="9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.969661 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnmpk\" (UniqueName: \"kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk\") pod \"894bd1e5-3142-4804-8cb1-1afc854a92f1\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.969902 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output\") pod \"894bd1e5-3142-4804-8cb1-1afc854a92f1\" (UID: \"894bd1e5-3142-4804-8cb1-1afc854a92f1\") " Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.979210 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk" (OuterVolumeSpecName: "kube-api-access-wnmpk") pod "894bd1e5-3142-4804-8cb1-1afc854a92f1" (UID: "894bd1e5-3142-4804-8cb1-1afc854a92f1"). InnerVolumeSpecName "kube-api-access-wnmpk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:51:49 crc kubenswrapper[4747]: I0202 09:51:49.994340 4747 scope.go:117] "RemoveContainer" containerID="65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.071901 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnmpk\" (UniqueName: \"kubernetes.io/projected/894bd1e5-3142-4804-8cb1-1afc854a92f1-kube-api-access-wnmpk\") on node \"crc\" DevicePath \"\"" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.105462 4747 scope.go:117] "RemoveContainer" containerID="9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c" Feb 02 09:51:50 crc kubenswrapper[4747]: E0202 09:51:50.105922 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c\": container with ID starting with 9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c not found: ID does not exist" containerID="9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.106060 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c"} err="failed to get container status \"9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c\": rpc error: code = NotFound desc = could not find container \"9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c\": container with ID starting with 9861230d58c261caa5f9579bf9d238fa644403437b5dd9c1dec1c326c6e1054c not found: ID does not exist" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.106186 4747 scope.go:117] "RemoveContainer" containerID="65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b" Feb 02 09:51:50 crc kubenswrapper[4747]: E0202 09:51:50.106574 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b\": container with ID starting with 65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b not found: ID does not exist" containerID="65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.106617 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b"} err="failed to get container status \"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b\": rpc error: code = NotFound desc = could not find container \"65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b\": container with ID starting with 65583dd6e31206570b29f309ff9231b19ef017c0a4f318458d5b83c4adfcfa1b not found: ID does not exist" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.164042 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "894bd1e5-3142-4804-8cb1-1afc854a92f1" (UID: "894bd1e5-3142-4804-8cb1-1afc854a92f1"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.174106 4747 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/894bd1e5-3142-4804-8cb1-1afc854a92f1-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 09:51:50 crc kubenswrapper[4747]: I0202 09:51:50.382681 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" path="/var/lib/kubelet/pods/894bd1e5-3142-4804-8cb1-1afc854a92f1/volumes" Feb 02 09:52:50 crc kubenswrapper[4747]: I0202 09:52:50.518597 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:52:50 crc kubenswrapper[4747]: I0202 09:52:50.519294 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:53:08 crc kubenswrapper[4747]: I0202 09:53:08.409614 4747 scope.go:117] "RemoveContainer" containerID="5087cb09d67073ab5543f67f69d64302e7dd6a1238c1b27d8b32dea88ad09714" Feb 02 09:53:08 crc kubenswrapper[4747]: I0202 09:53:08.448398 4747 scope.go:117] "RemoveContainer" containerID="00f08a302c2bb935331c1b45d6b1c994caac516912711dc65fb3e59aa3d42c1e" Feb 02 09:53:08 crc kubenswrapper[4747]: I0202 09:53:08.506054 4747 scope.go:117] "RemoveContainer" containerID="851e11d0d4e72df59d0edccde6c5b1ce3ff3f9ae693c153587c5a83da207712b" Feb 02 09:53:20 crc kubenswrapper[4747]: I0202 09:53:20.518368 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:53:20 crc kubenswrapper[4747]: I0202 09:53:20.518955 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399105 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:23 crc kubenswrapper[4747]: E0202 09:53:23.399838 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="extract-utilities" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399855 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="extract-utilities" Feb 02 09:53:23 crc kubenswrapper[4747]: E0202 09:53:23.399867 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="registry-server" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399876 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" 
containerName="registry-server" Feb 02 09:53:23 crc kubenswrapper[4747]: E0202 09:53:23.399892 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="gather" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399900 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="gather" Feb 02 09:53:23 crc kubenswrapper[4747]: E0202 09:53:23.399928 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="copy" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399953 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="copy" Feb 02 09:53:23 crc kubenswrapper[4747]: E0202 09:53:23.399978 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="extract-content" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.399986 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="extract-content" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.400240 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e75b9697-9ead-49fe-b5e0-b1d2a001129a" containerName="registry-server" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.400255 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="gather" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.400270 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bd1e5-3142-4804-8cb1-1afc854a92f1" containerName="copy" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.404269 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.411433 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.438319 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.438420 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hh9c\" (UniqueName: \"kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.438477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.540332 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.540398 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hh9c\" (UniqueName: \"kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.540436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.540909 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.541289 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.561485 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9hh9c\" (UniqueName: \"kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c\") pod \"redhat-marketplace-49n4p\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:23 crc kubenswrapper[4747]: I0202 09:53:23.742013 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:24 crc kubenswrapper[4747]: I0202 09:53:24.221899 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:24 crc kubenswrapper[4747]: I0202 09:53:24.773597 4747 generic.go:334] "Generic (PLEG): container finished" podID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerID="0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44" exitCode=0 Feb 02 09:53:24 crc kubenswrapper[4747]: I0202 09:53:24.773670 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerDied","Data":"0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44"} Feb 02 09:53:24 crc kubenswrapper[4747]: I0202 09:53:24.773894 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerStarted","Data":"9958831dd5a2a9e16955c688d028ded309e61ef8d549429c3200539c52ca46e4"} Feb 02 09:53:25 crc kubenswrapper[4747]: I0202 09:53:25.787246 4747 generic.go:334] "Generic (PLEG): container finished" podID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerID="d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c" exitCode=0 Feb 02 09:53:25 crc kubenswrapper[4747]: I0202 09:53:25.787322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerDied","Data":"d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c"} Feb 02 09:53:26 crc kubenswrapper[4747]: I0202 09:53:26.798184 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerStarted","Data":"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783"} Feb 02 09:53:26 crc kubenswrapper[4747]: I0202 09:53:26.824195 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-49n4p" podStartSLOduration=2.414186466 podStartE2EDuration="3.824168629s" podCreationTimestamp="2026-02-02 09:53:23 +0000 UTC" firstStartedPulling="2026-02-02 09:53:24.775832292 +0000 UTC m=+3417.320170725" lastFinishedPulling="2026-02-02 09:53:26.185814435 +0000 UTC m=+3418.730152888" observedRunningTime="2026-02-02 09:53:26.813627496 +0000 UTC m=+3419.357965939" watchObservedRunningTime="2026-02-02 09:53:26.824168629 +0000 UTC m=+3419.368507062" Feb 02 09:53:33 crc kubenswrapper[4747]: I0202 09:53:33.743108 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:33 crc kubenswrapper[4747]: I0202 09:53:33.743693 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:33 crc kubenswrapper[4747]: I0202 09:53:33.808651 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:33 crc kubenswrapper[4747]: I0202 09:53:33.913412 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:34 crc kubenswrapper[4747]: I0202 09:53:34.043277 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:35 crc kubenswrapper[4747]: I0202 09:53:35.889241 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-49n4p" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="registry-server" containerID="cri-o://9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783" gracePeriod=2 Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.444946 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.582159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hh9c\" (UniqueName: \"kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c\") pod \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.582205 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities\") pod \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.582236 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content\") pod \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\" (UID: \"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a\") " Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.583359 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities" (OuterVolumeSpecName: "utilities") pod "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" (UID: "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.584220 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.588962 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c" (OuterVolumeSpecName: "kube-api-access-9hh9c") pod "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" (UID: "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a"). InnerVolumeSpecName "kube-api-access-9hh9c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.605250 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" (UID: "bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.686093 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hh9c\" (UniqueName: \"kubernetes.io/projected/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-kube-api-access-9hh9c\") on node \"crc\" DevicePath \"\"" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.686127 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.900511 4747 generic.go:334] "Generic (PLEG): container finished" podID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerID="9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783" exitCode=0 Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.900555 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerDied","Data":"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783"} Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.900583 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-49n4p" event={"ID":"bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a","Type":"ContainerDied","Data":"9958831dd5a2a9e16955c688d028ded309e61ef8d549429c3200539c52ca46e4"} Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.900587 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-49n4p" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.900601 4747 scope.go:117] "RemoveContainer" containerID="9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.931049 4747 scope.go:117] "RemoveContainer" containerID="d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.945584 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.958603 4747 scope.go:117] "RemoveContainer" containerID="0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44" Feb 02 09:53:36 crc kubenswrapper[4747]: I0202 09:53:36.963222 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-49n4p"] Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.004210 4747 scope.go:117] "RemoveContainer" containerID="9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783" Feb 02 09:53:37 crc kubenswrapper[4747]: E0202 09:53:37.004580 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783\": container with ID starting with 9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783 not found: ID does not exist" containerID="9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783" Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.004617 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783"} err="failed to get container status \"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783\": rpc error: code = NotFound desc = could not find container \"9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783\": container with ID starting with 9c9e62de86eac856b96f12b2918a0a230e7c46c3ce50968182871b9281d07783 not found: ID does not exist" Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.004645 4747 scope.go:117] "RemoveContainer" containerID="d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c" Feb 02 09:53:37 crc kubenswrapper[4747]: E0202 09:53:37.005272 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c\": container with ID starting with d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c not found: ID does not exist" containerID="d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c" Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.005300 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c"} err="failed to get container status \"d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c\": rpc error: code = NotFound desc = could not find container \"d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c\": container with ID starting with d6c30ac9aefa3bd75f4237571cd809157de607f5f31b82aab81ddd0390fa7e9c not found: ID does not exist" Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.005319 4747 scope.go:117] "RemoveContainer" 
containerID="0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44" Feb 02 09:53:37 crc kubenswrapper[4747]: E0202 09:53:37.005626 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44\": container with ID starting with 0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44 not found: ID does not exist" containerID="0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44" Feb 02 09:53:37 crc kubenswrapper[4747]: I0202 09:53:37.005688 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44"} err="failed to get container status \"0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44\": rpc error: code = NotFound desc = could not find container \"0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44\": container with ID starting with 0268d383cd2f2bc68daf88a8bd14fc19cabf1f355eca3c9c3de2eef8f0f55c44 not found: ID does not exist" Feb 02 09:53:38 crc kubenswrapper[4747]: I0202 09:53:38.351964 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" path="/var/lib/kubelet/pods/bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a/volumes" Feb 02 09:53:50 crc kubenswrapper[4747]: I0202 09:53:50.518232 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:53:50 crc kubenswrapper[4747]: I0202 09:53:50.518802 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:53:50 crc kubenswrapper[4747]: I0202 09:53:50.518839 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" Feb 02 09:53:50 crc kubenswrapper[4747]: I0202 09:53:50.519629 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f5ada8c177a83d2798fbbb06dadafaceecb253c3f3c11066434fdcda3a262b6"} pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 09:53:50 crc kubenswrapper[4747]: I0202 09:53:50.519678 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" containerID="cri-o://8f5ada8c177a83d2798fbbb06dadafaceecb253c3f3c11066434fdcda3a262b6" gracePeriod=600 Feb 02 09:53:51 crc kubenswrapper[4747]: I0202 09:53:51.035471 4747 generic.go:334] "Generic (PLEG): container finished" podID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerID="8f5ada8c177a83d2798fbbb06dadafaceecb253c3f3c11066434fdcda3a262b6" exitCode=0 Feb 02 09:53:51 crc kubenswrapper[4747]: I0202 09:53:51.035560 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerDied","Data":"8f5ada8c177a83d2798fbbb06dadafaceecb253c3f3c11066434fdcda3a262b6"} Feb 02 09:53:51 crc kubenswrapper[4747]: I0202 09:53:51.035968 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" event={"ID":"1fadfd97-1567-40c1-a5e7-98ed7e3d67d6","Type":"ContainerStarted","Data":"685bfe537ccf710b9508e9f330f859af3a3c2306bca09d0a3b2fb9ab5e57594b"} Feb 02 09:53:51 crc kubenswrapper[4747]: I0202 09:53:51.036000 4747 scope.go:117] "RemoveContainer" containerID="9082ee98cd8a67cd9279e94a268452e10ce23ef49305a2adcdc522973be82efe" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.230023 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:10 crc kubenswrapper[4747]: E0202 09:55:10.231234 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="registry-server" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.231249 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="registry-server" Feb 02 09:55:10 crc kubenswrapper[4747]: E0202 09:55:10.231277 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="extract-utilities" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.231284 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="extract-utilities" Feb 02 09:55:10 crc kubenswrapper[4747]: E0202 09:55:10.231303 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="extract-content" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.231309 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="extract-content" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.231489 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf8d764a-34d4-4fa7-8e7a-e7e1b5eabb1a" containerName="registry-server" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.232801 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.255614 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.410805 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.410874 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv6bs\" (UniqueName: \"kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.411204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.513108 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.513207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.513273 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv6bs\" (UniqueName: \"kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.513881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.514199 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.533741 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gv6bs\" (UniqueName: \"kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs\") pod \"redhat-operators-w9fr7\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:10 crc kubenswrapper[4747]: I0202 09:55:10.579037 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:11 crc kubenswrapper[4747]: I0202 09:55:11.074468 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:11 crc kubenswrapper[4747]: I0202 09:55:11.964990 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef19e1dd-542d-40b9-a3f0-d96264c2bdca" containerID="b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9" exitCode=0 Feb 02 09:55:11 crc kubenswrapper[4747]: I0202 09:55:11.965076 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerDied","Data":"b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9"} Feb 02 09:55:11 crc kubenswrapper[4747]: I0202 09:55:11.965715 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerStarted","Data":"3841bdf99fcf4819427450a503c67bf178f60735208734853a5c2d94ca7d1ecf"} Feb 02 09:55:12 crc kubenswrapper[4747]: I0202 09:55:12.980105 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerStarted","Data":"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60"} Feb 02 09:55:13 crc kubenswrapper[4747]: I0202 09:55:13.992511 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef19e1dd-542d-40b9-a3f0-d96264c2bdca" containerID="95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60" exitCode=0 Feb 02 09:55:13 crc kubenswrapper[4747]: I0202 09:55:13.992558 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerDied","Data":"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60"} Feb 02 09:55:15 crc kubenswrapper[4747]: I0202 09:55:15.002720 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerStarted","Data":"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a"} Feb 02 09:55:15 crc kubenswrapper[4747]: I0202 09:55:15.027142 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w9fr7" podStartSLOduration=2.550101703 podStartE2EDuration="5.027119391s" podCreationTimestamp="2026-02-02 09:55:10 +0000 UTC" firstStartedPulling="2026-02-02 09:55:11.968268781 +0000 UTC m=+3524.512607214" lastFinishedPulling="2026-02-02 09:55:14.445286469 +0000 UTC m=+3526.989624902" observedRunningTime="2026-02-02 09:55:15.022203358 +0000 UTC m=+3527.566541791" watchObservedRunningTime="2026-02-02 09:55:15.027119391 +0000 UTC m=+3527.571457824" Feb 02 09:55:20 crc kubenswrapper[4747]: I0202 09:55:20.580144 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w9fr7" 
Feb 02 09:55:20 crc kubenswrapper[4747]: I0202 09:55:20.581138 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:20 crc kubenswrapper[4747]: I0202 09:55:20.639527 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:21 crc kubenswrapper[4747]: I0202 09:55:21.112321 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:21 crc kubenswrapper[4747]: I0202 09:55:21.164558 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.078716 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w9fr7" podUID="ef19e1dd-542d-40b9-a3f0-d96264c2bdca" containerName="registry-server" containerID="cri-o://fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a" gracePeriod=2 Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.514477 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.588055 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv6bs\" (UniqueName: \"kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs\") pod \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.588269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities\") pod \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.588409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content\") pod \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\" (UID: \"ef19e1dd-542d-40b9-a3f0-d96264c2bdca\") " Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.589256 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities" (OuterVolumeSpecName: "utilities") pod "ef19e1dd-542d-40b9-a3f0-d96264c2bdca" (UID: "ef19e1dd-542d-40b9-a3f0-d96264c2bdca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.595258 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs" (OuterVolumeSpecName: "kube-api-access-gv6bs") pod "ef19e1dd-542d-40b9-a3f0-d96264c2bdca" (UID: "ef19e1dd-542d-40b9-a3f0-d96264c2bdca"). InnerVolumeSpecName "kube-api-access-gv6bs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.690990 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 09:55:23 crc kubenswrapper[4747]: I0202 09:55:23.691025 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv6bs\" (UniqueName: \"kubernetes.io/projected/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-kube-api-access-gv6bs\") on node \"crc\" DevicePath \"\"" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.089438 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef19e1dd-542d-40b9-a3f0-d96264c2bdca" containerID="fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a" exitCode=0 Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.089502 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerDied","Data":"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a"} Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.089760 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w9fr7" event={"ID":"ef19e1dd-542d-40b9-a3f0-d96264c2bdca","Type":"ContainerDied","Data":"3841bdf99fcf4819427450a503c67bf178f60735208734853a5c2d94ca7d1ecf"} Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.089788 4747 scope.go:117] "RemoveContainer" containerID="fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.089517 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w9fr7" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.107194 4747 scope.go:117] "RemoveContainer" containerID="95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.125884 4747 scope.go:117] "RemoveContainer" containerID="b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.164986 4747 scope.go:117] "RemoveContainer" containerID="fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a" Feb 02 09:55:24 crc kubenswrapper[4747]: E0202 09:55:24.165409 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a\": container with ID starting with fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a not found: ID does not exist" containerID="fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.165452 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a"} err="failed to get container status \"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a\": rpc error: code = NotFound desc = could not find container \"fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a\": container with ID starting with fe24f60a19a3f4df98f6db4cec6644622815a4cf1191b24db30bfe1fd1f4ed5a not found: ID does not exist" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.165481 4747 scope.go:117] "RemoveContainer" containerID="95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60" Feb 02 09:55:24 crc kubenswrapper[4747]: E0202 09:55:24.165982 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60\": container with ID starting with 95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60 not found: ID does not exist" containerID="95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.166035 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60"} err="failed to get container status \"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60\": rpc error: code = NotFound desc = could not find container \"95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60\": container with ID starting with 95a20549d5051010493c050b659012bd2677b4f5e697ddd6d159ea1a81bd9d60 not found: ID does not exist" Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.166063 4747 scope.go:117] "RemoveContainer" containerID="b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9" Feb 02 09:55:24 crc kubenswrapper[4747]: E0202 09:55:24.166528 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9\": container with ID starting with b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9 not found: ID does not exist" containerID="b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9" 
Feb 02 09:55:24 crc kubenswrapper[4747]: I0202 09:55:24.166577 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9"} err="failed to get container status \"b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9\": rpc error: code = NotFound desc = could not find container \"b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9\": container with ID starting with b45156aedce1a598f4add3db9cb9aa2a65f91baf7248963eda00de18711890e9 not found: ID does not exist" Feb 02 09:55:25 crc kubenswrapper[4747]: I0202 09:55:25.238564 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef19e1dd-542d-40b9-a3f0-d96264c2bdca" (UID: "ef19e1dd-542d-40b9-a3f0-d96264c2bdca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 09:55:25 crc kubenswrapper[4747]: I0202 09:55:25.255706 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef19e1dd-542d-40b9-a3f0-d96264c2bdca-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 09:55:25 crc kubenswrapper[4747]: I0202 09:55:25.326578 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:25 crc kubenswrapper[4747]: I0202 09:55:25.335638 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w9fr7"] Feb 02 09:55:26 crc kubenswrapper[4747]: I0202 09:55:26.352794 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef19e1dd-542d-40b9-a3f0-d96264c2bdca" path="/var/lib/kubelet/pods/ef19e1dd-542d-40b9-a3f0-d96264c2bdca/volumes" Feb 02 09:55:50 crc kubenswrapper[4747]: I0202 09:55:50.518133 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:55:50 crc kubenswrapper[4747]: I0202 09:55:50.518539 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 09:56:20 crc kubenswrapper[4747]: I0202 09:56:20.518682 4747 patch_prober.go:28] interesting pod/machine-config-daemon-g8f8b container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 09:56:20 crc kubenswrapper[4747]: I0202 09:56:20.519181 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g8f8b" podUID="1fadfd97-1567-40c1-a5e7-98ed7e3d67d6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140072322024441 0ustar coreroot  
Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140072323017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140062716016507 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140062716015457 5ustar corecore